author     Linus Torvalds <torvalds@g5.osdl.org>  2006-06-26 14:03:29 -0400
committer  Linus Torvalds <torvalds@g5.osdl.org>  2006-06-26 14:03:29 -0400
commit     972d19e837833b93466c6f6a8ef2a7d653000aa3 (patch)
tree       069258492d5347cf440b8240dadfa20621f54842
parent     cdf4f383a4b0ffbf458f65380ecffbeee1f79841 (diff)
parent     b9d0a25a484a90c1d60b974d115eff2fe580ce16 (diff)
Merge master.kernel.org:/pub/scm/linux/kernel/git/herbert/crypto-2.6
* master.kernel.org:/pub/scm/linux/kernel/git/herbert/crypto-2.6:
[CRYPTO] tcrypt: Forbid tcrypt from being built-in
[CRYPTO] aes: Add wrappers for assembly routines
[CRYPTO] tcrypt: Speed benchmark support for digest algorithms
[CRYPTO] tcrypt: Return -EAGAIN from module_init()
[CRYPTO] api: Allow replacement when registering new algorithms
[CRYPTO] api: Removed const from cra_name/cra_driver_name
[CRYPTO] api: Added cra_init/cra_exit
[CRYPTO] api: Fixed incorrect passing of context instead of tfm
[CRYPTO] padlock: Rearrange context structure to reduce code size
[CRYPTO] all: Pass tfm instead of ctx to algorithms
[CRYPTO] digest: Remove unnecessary zeroing during init
[CRYPTO] aes-i586: Get rid of useless function wrappers
[CRYPTO] digest: Add alignment handling
[CRYPTO] khazad: Use 32-bit reads on key
41 files changed, 648 insertions, 394 deletions
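
The common thread through these patches is the API conversion: cipher, digest and compression callbacks now receive the struct crypto_tfm itself instead of a bare context pointer, and recover their private state with crypto_tfm_ctx(tfm). Below is a minimal sketch of what a converted block cipher's callbacks look like under the new signatures; struct toy_ctx and the XOR "transform" are invented for illustration, and only the prototypes and the crypto_tfm_ctx() usage mirror the API shown in the diffs that follow.

#include <linux/crypto.h>
#include <linux/string.h>

struct toy_ctx {                        /* hypothetical per-tfm state */
        u8 key[16];
};

static int toy_setkey(struct crypto_tfm *tfm, const u8 *in_key,
                      unsigned int key_len, u32 *flags)
{
        struct toy_ctx *ctx = crypto_tfm_ctx(tfm);  /* was: void *ctx argument */

        if (key_len != sizeof(ctx->key)) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }
        memcpy(ctx->key, in_key, key_len);
        return 0;
}

static void toy_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct toy_ctx *ctx = crypto_tfm_ctx(tfm);
        int i;

        for (i = 0; i < 16; i++)        /* toy transform, not a real cipher */
                dst[i] = src[i] ^ ctx->key[i];
}
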
diff --git a/arch/i386/crypto/aes-i586-asm.S b/arch/i386/crypto/aes-i586-asm.S
index 911b15377f2e..f942f0c8f630 100644
--- a/arch/i386/crypto/aes-i586-asm.S
+++ b/arch/i386/crypto/aes-i586-asm.S
@@ -36,22 +36,19 @@
 .file "aes-i586-asm.S"
 .text

-// aes_rval aes_enc_blk(const unsigned char in_blk[], unsigned char out_blk[], const aes_ctx cx[1])//
-// aes_rval aes_dec_blk(const unsigned char in_blk[], unsigned char out_blk[], const aes_ctx cx[1])//
-
-#define tlen 1024 // length of each of 4 'xor' arrays (256 32-bit words)
+#include <asm/asm-offsets.h>

-// offsets to parameters with one register pushed onto stack
+#define tlen 1024 // length of each of 4 'xor' arrays (256 32-bit words)

-#define in_blk 8 // input byte array address parameter
-#define out_blk 12 // output byte array address parameter
-#define ctx 16 // AES context structure
+/* offsets to parameters with one register pushed onto stack */
+#define tfm 8
+#define out_blk 12
+#define in_blk 16

-// offsets in context structure
-
-#define ekey 0 // encryption key schedule base address
-#define nrnd 256 // number of rounds
-#define dkey 260 // decryption key schedule base address
+/* offsets in crypto_tfm structure */
+#define ekey (crypto_tfm_ctx_offset + 0)
+#define nrnd (crypto_tfm_ctx_offset + 256)
+#define dkey (crypto_tfm_ctx_offset + 260)

 // register mapping for encrypt and decrypt subroutines

@@ -220,6 +217,7 @@
 do_col (table, r5,r0,r1,r4, r2,r3); /* idx=r5 */

 // AES (Rijndael) Encryption Subroutine
+/* void aes_enc_blk(struct crypto_tfm *tfm, u8 *out_blk, const u8 *in_blk) */

 .global aes_enc_blk

@@ -230,7 +228,7 @@

 aes_enc_blk:
 push %ebp
-mov ctx(%esp),%ebp // pointer to context
+mov tfm(%esp),%ebp

 // CAUTION: the order and the values used in these assigns
 // rely on the register mappings
@@ -295,6 +293,7 @@ aes_enc_blk:
 ret

 // AES (Rijndael) Decryption Subroutine
+/* void aes_dec_blk(struct crypto_tfm *tfm, u8 *out_blk, const u8 *in_blk) */

 .global aes_dec_blk

@@ -305,7 +304,7 @@ aes_enc_blk:

 aes_dec_blk:
 push %ebp
-mov ctx(%esp),%ebp // pointer to context
+mov tfm(%esp),%ebp

 // CAUTION: the order and the values used in these assigns
 // rely on the register mappings
diff --git a/arch/i386/crypto/aes.c b/arch/i386/crypto/aes.c
index a50397b1d5c7..d3806daa3de3 100644
--- a/arch/i386/crypto/aes.c
+++ b/arch/i386/crypto/aes.c
@@ -45,8 +45,8 @@
 #include <linux/crypto.h>
 #include <linux/linkage.h>

-asmlinkage void aes_enc_blk(const u8 *src, u8 *dst, void *ctx);
-asmlinkage void aes_dec_blk(const u8 *src, u8 *dst, void *ctx);
+asmlinkage void aes_enc_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
+asmlinkage void aes_dec_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);

 #define AES_MIN_KEY_SIZE 16
 #define AES_MAX_KEY_SIZE 32
@@ -378,12 +378,12 @@ static void gen_tabs(void)
 k[8*(i)+11] = ss[3]; \
 }

-static int
-aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u32 *flags)
+static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+                       unsigned int key_len, u32 *flags)
 {
 int i;
 u32 ss[8];
-struct aes_ctx *ctx = ctx_arg;
+struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
 const __le32 *key = (const __le32 *)in_key;

 /* encryption schedule */
@@ -464,16 +464,16 @@ aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u32 *flags)
 return 0;
 }

-static inline void aes_encrypt(void *ctx, u8 *dst, const u8 *src)
+static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-aes_enc_blk(src, dst, ctx);
+aes_enc_blk(tfm, dst, src);
 }
-static inline void aes_decrypt(void *ctx, u8 *dst, const u8 *src)
+
+static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-aes_dec_blk(src, dst, ctx);
+aes_dec_blk(tfm, dst, src);
 }

-
 static struct crypto_alg aes_alg = {
 .cra_name = "aes",
 .cra_driver_name = "aes-i586",
diff --git a/arch/i386/kernel/asm-offsets.c b/arch/i386/kernel/asm-offsets.c
index 36d66e2077d0..1c3a809e6421 100644
--- a/arch/i386/kernel/asm-offsets.c
+++ b/arch/i386/kernel/asm-offsets.c
@@ -4,6 +4,7 @@
  * to extract and format the required data.
  */

+#include <linux/crypto.h>
 #include <linux/sched.h>
 #include <linux/signal.h>
 #include <linux/personality.h>
@@ -69,4 +70,6 @@ void foo(void)

 DEFINE(PAGE_SIZE_asm, PAGE_SIZE);
 DEFINE(VSYSCALL_BASE, __fix_to_virt(FIX_VSYSCALL));
+
+OFFSET(crypto_tfm_ctx_offset, crypto_tfm, __crt_ctx);
 }
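
The asm-offsets change above is what makes the assembly conversions possible: OFFSET() emits offsetof(struct crypto_tfm, __crt_ctx) into the generated <asm/asm-offsets.h>, so the .S files can address the key schedule relative to the tfm pointer they now receive. The generated header ends up containing a line roughly like the one below; the numeric value is whatever the struct layout yields for a given build, and 48 here is purely illustrative.

/* excerpt of a generated asm-offsets.h -- the value shown is illustrative only */
#define crypto_tfm_ctx_offset 48 /* offsetof(struct crypto_tfm, __crt_ctx) */
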
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c
index c5ca2dc5d428..5713c7e5bd16 100644
--- a/arch/s390/crypto/aes_s390.c
+++ b/arch/s390/crypto/aes_s390.c
@@ -37,10 +37,10 @@ struct s390_aes_ctx {
 int key_len;
 };

-static int aes_set_key(void *ctx, const u8 *in_key, unsigned int key_len,
-                       u32 *flags)
+static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+                       unsigned int key_len, u32 *flags)
 {
-struct s390_aes_ctx *sctx = ctx;
+struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

 switch (key_len) {
 case 16:
@@ -70,9 +70,9 @@ fail:
 return -EINVAL;
 }

-static void aes_encrypt(void *ctx, u8 *out, const u8 *in)
+static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-const struct s390_aes_ctx *sctx = ctx;
+const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

 switch (sctx->key_len) {
 case 16:
@@ -90,9 +90,9 @@ static void aes_encrypt(void *ctx, u8 *out, const u8 *in)
 }
 }

-static void aes_decrypt(void *ctx, u8 *out, const u8 *in)
+static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-const struct s390_aes_ctx *sctx = ctx;
+const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

 switch (sctx->key_len) {
 case 16:
diff --git a/arch/s390/crypto/des_s390.c b/arch/s390/crypto/des_s390.c
index e3c37aa0a199..b3f7496a79b4 100644
--- a/arch/s390/crypto/des_s390.c
+++ b/arch/s390/crypto/des_s390.c
@@ -44,10 +44,10 @@ struct crypt_s390_des3_192_ctx {
 u8 key[DES3_192_KEY_SIZE];
 };

-static int des_setkey(void *ctx, const u8 *key, unsigned int keylen,
-                      u32 *flags)
+static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
+                      unsigned int keylen, u32 *flags)
 {
-struct crypt_s390_des_ctx *dctx = ctx;
+struct crypt_s390_des_ctx *dctx = crypto_tfm_ctx(tfm);
 int ret;

 /* test if key is valid (not a weak key) */
@@ -57,16 +57,16 @@ static int des_setkey(void *ctx, const u8 *key, unsigned int keylen,
 return ret;
 }

-static void des_encrypt(void *ctx, u8 *out, const u8 *in)
+static void des_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-struct crypt_s390_des_ctx *dctx = ctx;
+struct crypt_s390_des_ctx *dctx = crypto_tfm_ctx(tfm);

 crypt_s390_km(KM_DEA_ENCRYPT, dctx->key, out, in, DES_BLOCK_SIZE);
 }

-static void des_decrypt(void *ctx, u8 *out, const u8 *in)
+static void des_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-struct crypt_s390_des_ctx *dctx = ctx;
+struct crypt_s390_des_ctx *dctx = crypto_tfm_ctx(tfm);

 crypt_s390_km(KM_DEA_DECRYPT, dctx->key, out, in, DES_BLOCK_SIZE);
 }
@@ -166,11 +166,11 @@ static struct crypto_alg des_alg = {
  * Implementers MUST reject keys that exhibit this property.
  *
  */
-static int des3_128_setkey(void *ctx, const u8 *key, unsigned int keylen,
-                           u32 *flags)
+static int des3_128_setkey(struct crypto_tfm *tfm, const u8 *key,
+                           unsigned int keylen, u32 *flags)
 {
 int i, ret;
-struct crypt_s390_des3_128_ctx *dctx = ctx;
+struct crypt_s390_des3_128_ctx *dctx = crypto_tfm_ctx(tfm);
 const u8* temp_key = key;

 if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE))) {
@@ -186,17 +186,17 @@ static int des3_128_setkey(void *ctx, const u8 *key, unsigned int keylen,
 return 0;
 }

-static void des3_128_encrypt(void *ctx, u8 *dst, const u8 *src)
+static void des3_128_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-struct crypt_s390_des3_128_ctx *dctx = ctx;
+struct crypt_s390_des3_128_ctx *dctx = crypto_tfm_ctx(tfm);

 crypt_s390_km(KM_TDEA_128_ENCRYPT, dctx->key, dst, (void*)src,
               DES3_128_BLOCK_SIZE);
 }

-static void des3_128_decrypt(void *ctx, u8 *dst, const u8 *src)
+static void des3_128_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-struct crypt_s390_des3_128_ctx *dctx = ctx;
+struct crypt_s390_des3_128_ctx *dctx = crypto_tfm_ctx(tfm);

 crypt_s390_km(KM_TDEA_128_DECRYPT, dctx->key, dst, (void*)src,
               DES3_128_BLOCK_SIZE);
@@ -302,11 +302,11 @@ static struct crypto_alg des3_128_alg = {
  * property.
  *
  */
-static int des3_192_setkey(void *ctx, const u8 *key, unsigned int keylen,
-                           u32 *flags)
+static int des3_192_setkey(struct crypto_tfm *tfm, const u8 *key,
+                           unsigned int keylen, u32 *flags)
 {
 int i, ret;
-struct crypt_s390_des3_192_ctx *dctx = ctx;
+struct crypt_s390_des3_192_ctx *dctx = crypto_tfm_ctx(tfm);
 const u8* temp_key = key;

 if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) &&
@@ -325,17 +325,17 @@ static int des3_192_setkey(void *ctx, const u8 *key, unsigned int keylen,
 return 0;
 }

-static void des3_192_encrypt(void *ctx, u8 *dst, const u8 *src)
+static void des3_192_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-struct crypt_s390_des3_192_ctx *dctx = ctx;
+struct crypt_s390_des3_192_ctx *dctx = crypto_tfm_ctx(tfm);

 crypt_s390_km(KM_TDEA_192_ENCRYPT, dctx->key, dst, (void*)src,
               DES3_192_BLOCK_SIZE);
 }

-static void des3_192_decrypt(void *ctx, u8 *dst, const u8 *src)
+static void des3_192_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-struct crypt_s390_des3_192_ctx *dctx = ctx;
+struct crypt_s390_des3_192_ctx *dctx = crypto_tfm_ctx(tfm);

 crypt_s390_km(KM_TDEA_192_DECRYPT, dctx->key, dst, (void*)src,
               DES3_192_BLOCK_SIZE);
diff --git a/arch/s390/crypto/sha1_s390.c b/arch/s390/crypto/sha1_s390.c
index 98c896b86dcd..9d34a35b1aa5 100644
--- a/arch/s390/crypto/sha1_s390.c
+++ b/arch/s390/crypto/sha1_s390.c
@@ -40,28 +40,29 @@ struct crypt_s390_sha1_ctx {
 u8 buffer[2 * SHA1_BLOCK_SIZE];
 };

-static void
-sha1_init(void *ctx)
+static void sha1_init(struct crypto_tfm *tfm)
 {
-static const struct crypt_s390_sha1_ctx initstate = {
-.state = {
+struct crypt_s390_sha1_ctx *ctx = crypto_tfm_ctx(tfm);
+static const u32 initstate[5] = {
 0x67452301,
 0xEFCDAB89,
 0x98BADCFE,
 0x10325476,
 0xC3D2E1F0
-},
 };
-memcpy(ctx, &initstate, sizeof(initstate));
+
+ctx->count = 0;
+memcpy(ctx->state, &initstate, sizeof(initstate));
+ctx->buf_len = 0;
 }

-static void
-sha1_update(void *ctx, const u8 *data, unsigned int len)
+static void sha1_update(struct crypto_tfm *tfm, const u8 *data,
+                        unsigned int len)
 {
 struct crypt_s390_sha1_ctx *sctx;
 long imd_len;

-sctx = ctx;
+sctx = crypto_tfm_ctx(tfm);
 sctx->count += len * 8; //message bit length

 //anything in buffer yet? -> must be completed
@@ -110,10 +111,9 @@ pad_message(struct crypt_s390_sha1_ctx* sctx)
 }

 /* Add padding and return the message digest. */
-static void
-sha1_final(void* ctx, u8 *out)
+static void sha1_final(struct crypto_tfm *tfm, u8 *out)
 {
-struct crypt_s390_sha1_ctx *sctx = ctx;
+struct crypt_s390_sha1_ctx *sctx = crypto_tfm_ctx(tfm);

 //must perform manual padding
 pad_message(sctx);
diff --git a/arch/s390/crypto/sha256_s390.c b/arch/s390/crypto/sha256_s390.c
index 1ec5e92b3454..f573df30f31d 100644
--- a/arch/s390/crypto/sha256_s390.c
+++ b/arch/s390/crypto/sha256_s390.c
@@ -31,9 +31,9 @@ struct s390_sha256_ctx {
 u8 buf[2 * SHA256_BLOCK_SIZE];
 };

-static void sha256_init(void *ctx)
+static void sha256_init(struct crypto_tfm *tfm)
 {
-struct s390_sha256_ctx *sctx = ctx;
+struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);

 sctx->state[0] = 0x6a09e667;
 sctx->state[1] = 0xbb67ae85;
@@ -44,12 +44,12 @@ static void sha256_init(void *ctx)
 sctx->state[6] = 0x1f83d9ab;
 sctx->state[7] = 0x5be0cd19;
 sctx->count = 0;
-memset(sctx->buf, 0, sizeof(sctx->buf));
 }

-static void sha256_update(void *ctx, const u8 *data, unsigned int len)
+static void sha256_update(struct crypto_tfm *tfm, const u8 *data,
+                          unsigned int len)
 {
-struct s390_sha256_ctx *sctx = ctx;
+struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);
 unsigned int index;
 int ret;

@@ -108,9 +108,9 @@ static void pad_message(struct s390_sha256_ctx* sctx)
 }

 /* Add padding and return the message digest */
-static void sha256_final(void* ctx, u8 *out)
+static void sha256_final(struct crypto_tfm *tfm, u8 *out)
 {
-struct s390_sha256_ctx *sctx = ctx;
+struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);

 /* must perform manual padding */
 pad_message(sctx);
diff --git a/arch/x86_64/crypto/aes-x86_64-asm.S b/arch/x86_64/crypto/aes-x86_64-asm.S
index 483cbb23ab8d..26b40de4d0b0 100644
--- a/arch/x86_64/crypto/aes-x86_64-asm.S
+++ b/arch/x86_64/crypto/aes-x86_64-asm.S
@@ -15,6 +15,10 @@

 .text

+#include <asm/asm-offsets.h>
+
+#define BASE crypto_tfm_ctx_offset
+
 #define R1 %rax
 #define R1E %eax
 #define R1X %ax
@@ -46,19 +50,19 @@
 #define R10 %r10
 #define R11 %r11

-#define prologue(FUNC,BASE,B128,B192,r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11) \
+#define prologue(FUNC,KEY,B128,B192,r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11) \
 .global FUNC; \
 .type FUNC,@function; \
 .align 8; \
 FUNC: movq r1,r2; \
 movq r3,r4; \
-leaq BASE+52(r8),r9; \
+leaq BASE+KEY+52(r8),r9; \
 movq r10,r11; \
 movl (r7),r5 ## E; \
 movl 4(r7),r1 ## E; \
 movl 8(r7),r6 ## E; \
 movl 12(r7),r7 ## E; \
-movl (r8),r10 ## E; \
+movl BASE(r8),r10 ## E; \
 xorl -48(r9),r5 ## E; \
 xorl -44(r9),r1 ## E; \
 xorl -40(r9),r6 ## E; \
@@ -128,8 +132,8 @@ FUNC: movq r1,r2; \
 movl r3 ## E,r1 ## E; \
 movl r4 ## E,r2 ## E;

-#define entry(FUNC,BASE,B128,B192) \
-	prologue(FUNC,BASE,B128,B192,R2,R8,R7,R9,R1,R3,R4,R6,R10,R5,R11)
+#define entry(FUNC,KEY,B128,B192) \
+	prologue(FUNC,KEY,B128,B192,R2,R8,R7,R9,R1,R3,R4,R6,R10,R5,R11)

 #define return epilogue(R8,R2,R9,R7,R5,R6,R3,R4,R11)

@@ -147,9 +151,9 @@ FUNC: movq r1,r2; \
 #define decrypt_final(TAB,OFFSET) \
 round(TAB,OFFSET,R2,R1,R4,R3,R6,R5,R7,R10,R5,R6,R3,R4)

-/* void aes_encrypt(void *ctx, u8 *out, const u8 *in) */
+/* void aes_enc_blk(stuct crypto_tfm *tfm, u8 *out, const u8 *in) */

-entry(aes_encrypt,0,enc128,enc192)
+entry(aes_enc_blk,0,enc128,enc192)
 encrypt_round(aes_ft_tab,-96)
 encrypt_round(aes_ft_tab,-80)
 enc192: encrypt_round(aes_ft_tab,-64)
@@ -166,9 +170,9 @@ enc128: encrypt_round(aes_ft_tab,-32)
 encrypt_final(aes_fl_tab,112)
 return

-/* void aes_decrypt(void *ctx, u8 *out, const u8 *in) */
+/* void aes_dec_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in) */

-entry(aes_decrypt,240,dec128,dec192)
+entry(aes_dec_blk,240,dec128,dec192)
 decrypt_round(aes_it_tab,-96)
 decrypt_round(aes_it_tab,-80)
 dec192: decrypt_round(aes_it_tab,-64)
diff --git a/arch/x86_64/crypto/aes.c b/arch/x86_64/crypto/aes.c
index 6f77e7700d32..68866fab37aa 100644
--- a/arch/x86_64/crypto/aes.c
+++ b/arch/x86_64/crypto/aes.c
@@ -227,10 +227,10 @@ static void __init gen_tabs(void)
 t ^= E_KEY[8 * i + 7]; E_KEY[8 * i + 15] = t; \
 }

-static int aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len,
-                       u32 *flags)
+static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+                       unsigned int key_len, u32 *flags)
 {
-struct aes_ctx *ctx = ctx_arg;
+struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
 const __le32 *key = (const __le32 *)in_key;
 u32 i, j, t, u, v, w;

@@ -283,8 +283,18 @@ static int aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len,
 return 0;
 }

-extern void aes_encrypt(void *ctx_arg, u8 *out, const u8 *in);
-extern void aes_decrypt(void *ctx_arg, u8 *out, const u8 *in);
+asmlinkage void aes_enc_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in);
+asmlinkage void aes_dec_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in);
+
+static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+{
+aes_enc_blk(tfm, dst, src);
+}
+
+static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+{
+aes_dec_blk(tfm, dst, src);
+}

 static struct crypto_alg aes_alg = {
 .cra_name = "aes",
diff --git a/arch/x86_64/kernel/asm-offsets.c b/arch/x86_64/kernel/asm-offsets.c
index 38834bbbae11..96687e2beb2c 100644
--- a/arch/x86_64/kernel/asm-offsets.c
+++ b/arch/x86_64/kernel/asm-offsets.c
@@ -4,6 +4,7 @@
  * and format the required data.
  */

+#include <linux/crypto.h>
 #include <linux/sched.h>
 #include <linux/stddef.h>
 #include <linux/errno.h>
@@ -68,5 +69,7 @@ int main(void)
 DEFINE(pbe_next, offsetof(struct pbe, next));
 BLANK();
 DEFINE(TSS_ist, offsetof(struct tss_struct, ist));
+BLANK();
+DEFINE(crypto_tfm_ctx_offset, offsetof(struct crypto_tfm, __crt_ctx));
 return 0;
 }
diff --git a/crypto/Kconfig b/crypto/Kconfig
index c442f2e7ce46..ba133d557045 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -337,7 +337,7 @@ config CRYPTO_CRC32C

 config CRYPTO_TEST
 	tristate "Testing module"
-	depends on CRYPTO
+	depends on CRYPTO && m
 	help
 	  Quick & dirty crypto test module.

diff --git a/crypto/aes.c b/crypto/aes.c
index a5017292e066..a038711831e7 100644
--- a/crypto/aes.c
+++ b/crypto/aes.c
@@ -248,10 +248,10 @@ gen_tabs (void)
 t ^= E_KEY[8 * i + 7]; E_KEY[8 * i + 15] = t; \
 }

-static int
-aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u32 *flags)
+static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+                       unsigned int key_len, u32 *flags)
 {
-struct aes_ctx *ctx = ctx_arg;
+struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
 const __le32 *key = (const __le32 *)in_key;
 u32 i, t, u, v, w;

@@ -318,9 +318,9 @@ aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u32 *flags)
 f_rl(bo, bi, 2, k); \
 f_rl(bo, bi, 3, k)

-static void aes_encrypt(void *ctx_arg, u8 *out, const u8 *in)
+static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-const struct aes_ctx *ctx = ctx_arg;
+const struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
 const __le32 *src = (const __le32 *)in;
 __le32 *dst = (__le32 *)out;
 u32 b0[4], b1[4];
@@ -373,9 +373,9 @@ static void aes_encrypt(void *ctx_arg, u8 *out, const u8 *in)
 i_rl(bo, bi, 2, k); \
 i_rl(bo, bi, 3, k)

-static void aes_decrypt(void *ctx_arg, u8 *out, const u8 *in)
+static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-const struct aes_ctx *ctx = ctx_arg;
+const struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
 const __le32 *src = (const __le32 *)in;
 __le32 *dst = (__le32 *)out;
 u32 b0[4], b1[4];
diff --git a/crypto/anubis.c b/crypto/anubis.c
index 2c796bdb91a6..7e2e1a29800e 100644
--- a/crypto/anubis.c
+++ b/crypto/anubis.c
@@ -460,16 +460,15 @@ static const u32 rc[] = {
 0xf726ffedU, 0xe89d6f8eU, 0x19a0f089U,
 };

-static int anubis_setkey(void *ctx_arg, const u8 *in_key,
+static int anubis_setkey(struct crypto_tfm *tfm, const u8 *in_key,
                          unsigned int key_len, u32 *flags)
 {
+struct anubis_ctx *ctx = crypto_tfm_ctx(tfm);
 const __be32 *key = (const __be32 *)in_key;
 int N, R, i, r;
 u32 kappa[ANUBIS_MAX_N];
 u32 inter[ANUBIS_MAX_N];

-struct anubis_ctx *ctx = ctx_arg;
-
 switch (key_len)
 {
 case 16: case 20: case 24: case 28:
@@ -660,15 +659,15 @@ static void anubis_crypt(u32 roundKey[ANUBIS_MAX_ROUNDS + 1][4],
 dst[i] = cpu_to_be32(inter[i]);
 }

-static void anubis_encrypt(void *ctx_arg, u8 *dst, const u8 *src)
+static void anubis_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-struct anubis_ctx *ctx = ctx_arg;
+struct anubis_ctx *ctx = crypto_tfm_ctx(tfm);
 anubis_crypt(ctx->E, dst, src, ctx->R);
 }

-static void anubis_decrypt(void *ctx_arg, u8 *dst, const u8 *src)
+static void anubis_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-struct anubis_ctx *ctx = ctx_arg;
+struct anubis_ctx *ctx = crypto_tfm_ctx(tfm);
 anubis_crypt(ctx->D, dst, src, ctx->R);
 }

diff --git a/crypto/api.c b/crypto/api.c
index 80bba637fba7..c11ec1fd4f18 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -188,13 +188,16 @@ struct crypto_tfm *crypto_alloc_tfm(const char *name, u32 flags)
 if (crypto_init_flags(tfm, flags))
 	goto out_free_tfm;

-if (crypto_init_ops(tfm)) {
-	crypto_exit_ops(tfm);
+if (crypto_init_ops(tfm))
 	goto out_free_tfm;
-}
+
+if (alg->cra_init && alg->cra_init(tfm))
+	goto cra_init_failed;

 goto out;

+cra_init_failed:
+	crypto_exit_ops(tfm);
 out_free_tfm:
 	kfree(tfm);
 	tfm = NULL;
@@ -215,6 +218,8 @@ void crypto_free_tfm(struct crypto_tfm *tfm)
 alg = tfm->__crt_alg;
 size = sizeof(*tfm) + alg->cra_ctxsize;

+if (alg->cra_exit)
+	alg->cra_exit(tfm);
 crypto_exit_ops(tfm);
 crypto_alg_put(alg);
 memset(tfm, 0, size);
@@ -224,7 +229,7 @@ void crypto_free_tfm(struct crypto_tfm *tfm)
 static inline int crypto_set_driver_name(struct crypto_alg *alg)
 {
 static const char suffix[] = "-generic";
-char *driver_name = (char *)alg->cra_driver_name;
+char *driver_name = alg->cra_driver_name;
 int len;

 if (*driver_name)
@@ -262,13 +267,13 @@ int crypto_register_alg(struct crypto_alg *alg)
 down_write(&crypto_alg_sem);

 list_for_each_entry(q, &crypto_alg_list, cra_list) {
-	if (!strcmp(q->cra_driver_name, alg->cra_driver_name)) {
+	if (q == alg) {
 		ret = -EEXIST;
 		goto out;
 	}
 }

-list_add_tail(&alg->cra_list, &crypto_alg_list);
+list_add(&alg->cra_list, &crypto_alg_list);
 out:
 up_write(&crypto_alg_sem);
 return ret;
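
The api.c changes above add per-tfm constructor/destructor hooks: cra_init runs after crypto_init_ops() in crypto_alloc_tfm() and, on failure, unwinds through the new cra_init_failed label; cra_exit runs in crypto_free_tfm() before the ops are torn down. A hedged sketch of how an algorithm would wire up the hooks follows; the example_* names are invented for illustration, and the crypto/deflate.c hunk later in this diff is the real in-tree user.

static int example_init(struct crypto_tfm *tfm)
{
        struct example_ctx *ctx = crypto_tfm_ctx(tfm);  /* example_ctx is hypothetical */

        /* allocate or precompute per-tfm state; a non-zero return makes
         * crypto_alloc_tfm() fail and run crypto_exit_ops() for us */
        return example_alloc_workspace(ctx);            /* hypothetical helper */
}

static void example_exit(struct crypto_tfm *tfm)
{
        example_free_workspace(crypto_tfm_ctx(tfm));    /* hypothetical helper */
}

static struct crypto_alg example_alg = {
        /* ... cra_name, cra_blocksize, cra_ctxsize, ... */
        .cra_init = example_init,
        .cra_exit = example_exit,
};
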
diff --git a/crypto/arc4.c b/crypto/arc4.c
index 9efbcaae88a1..5edc6a65b987 100644
--- a/crypto/arc4.c
+++ b/crypto/arc4.c
@@ -24,9 +24,10 @@ struct arc4_ctx {
 u8 x, y;
 };

-static int arc4_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u32 *flags)
+static int arc4_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+                        unsigned int key_len, u32 *flags)
 {
-struct arc4_ctx *ctx = ctx_arg;
+struct arc4_ctx *ctx = crypto_tfm_ctx(tfm);
 int i, j = 0, k = 0;

 ctx->x = 1;
@@ -48,9 +49,9 @@ static int arc4_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u
 return 0;
 }

-static void arc4_crypt(void *ctx_arg, u8 *out, const u8 *in)
+static void arc4_crypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-struct arc4_ctx *ctx = ctx_arg;
+struct arc4_ctx *ctx = crypto_tfm_ctx(tfm);

 u8 *const S = ctx->S;
 u8 x = ctx->x;
diff --git a/crypto/blowfish.c b/crypto/blowfish.c
index 7f710b201f20..490265f42b3b 100644
--- a/crypto/blowfish.c
+++ b/crypto/blowfish.c
@@ -349,7 +349,7 @@ static void encrypt_block(struct bf_ctx *bctx, u32 *dst, u32 *src)
 dst[1] = yl;
 }

-static void bf_encrypt(void *ctx, u8 *dst, const u8 *src)
+static void bf_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
 const __be32 *in_blk = (const __be32 *)src;
 __be32 *const out_blk = (__be32 *)dst;
@@ -357,17 +357,18 @@ static void bf_encrypt(void *ctx, u8 *dst, const u8 *src)

 in32[0] = be32_to_cpu(in_blk[0]);
 in32[1] = be32_to_cpu(in_blk[1]);
-encrypt_block(ctx, out32, in32);
+encrypt_block(crypto_tfm_ctx(tfm), out32, in32);
 out_blk[0] = cpu_to_be32(out32[0]);
 out_blk[1] = cpu_to_be32(out32[1]);
 }

-static void bf_decrypt(void *ctx, u8 *dst, const u8 *src)
+static void bf_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
+struct bf_ctx *ctx = crypto_tfm_ctx(tfm);
 const __be32 *in_blk = (const __be32 *)src;
 __be32 *const out_blk = (__be32 *)dst;
-const u32 *P = ((struct bf_ctx *)ctx)->p;
-const u32 *S = ((struct bf_ctx *)ctx)->s;
+const u32 *P = ctx->p;
+const u32 *S = ctx->s;
 u32 yl = be32_to_cpu(in_blk[0]);
 u32 yr = be32_to_cpu(in_blk[1]);

@@ -398,12 +399,14 @@ static void bf_decrypt(void *ctx, u8 *dst, const u8 *src)
 /*
  * Calculates the blowfish S and P boxes for encryption and decryption.
  */
-static int bf_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
+static int bf_setkey(struct crypto_tfm *tfm, const u8 *key,
+                     unsigned int keylen, u32 *flags)
 {
+struct bf_ctx *ctx = crypto_tfm_ctx(tfm);
+u32 *P = ctx->p;
+u32 *S = ctx->s;
 short i, j, count;
 u32 data[2], temp;
-u32 *P = ((struct bf_ctx *)ctx)->p;
-u32 *S = ((struct bf_ctx *)ctx)->s;

 /* Copy the initialization s-boxes */
 for (i = 0, count = 0; i < 256; i++)
diff --git a/crypto/cast5.c b/crypto/cast5.c
index 8834c8580c04..08eef58c1d3d 100644
--- a/crypto/cast5.c
+++ b/crypto/cast5.c
@@ -577,9 +577,9 @@ static const u32 sb8[256] = {
 (((s1[I >> 24] + s2[(I>>16)&0xff]) ^ s3[(I>>8)&0xff]) - s4[I&0xff]) )


-static void cast5_encrypt(void *ctx, u8 * outbuf, const u8 * inbuf)
+static void cast5_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
 {
-struct cast5_ctx *c = (struct cast5_ctx *) ctx;
+struct cast5_ctx *c = crypto_tfm_ctx(tfm);
 const __be32 *src = (const __be32 *)inbuf;
 __be32 *dst = (__be32 *)outbuf;
 u32 l, r, t;
@@ -642,9 +642,9 @@ static void cast5_encrypt(void *ctx, u8 * outbuf, const u8 * inbuf)
 dst[1] = cpu_to_be32(l);
 }

-static void cast5_decrypt(void *ctx, u8 * outbuf, const u8 * inbuf)
+static void cast5_decrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
 {
-struct cast5_ctx *c = (struct cast5_ctx *) ctx;
+struct cast5_ctx *c = crypto_tfm_ctx(tfm);
 const __be32 *src = (const __be32 *)inbuf;
 __be32 *dst = (__be32 *)outbuf;
 u32 l, r, t;
@@ -769,15 +769,15 @@ static void key_schedule(u32 * x, u32 * z, u32 * k)
 }


-static int
-cast5_setkey(void *ctx, const u8 * key, unsigned key_len, u32 * flags)
+static int cast5_setkey(struct crypto_tfm *tfm, const u8 *key,
+                        unsigned key_len, u32 *flags)
 {
+struct cast5_ctx *c = crypto_tfm_ctx(tfm);
 int i;
 u32 x[4];
 u32 z[4];
 u32 k[16];
 __be32 p_key[4];
-struct cast5_ctx *c = (struct cast5_ctx *) ctx;

 if (key_len < 5 || key_len > 16) {
 	*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
diff --git a/crypto/cast6.c b/crypto/cast6.c
index 9e28740ba775..08e33bfc3ad1 100644
--- a/crypto/cast6.c
+++ b/crypto/cast6.c
@@ -381,13 +381,13 @@ static inline void W(u32 *key, unsigned int i) {
 key[7] ^= F2(key[0], Tr[i % 4][7], Tm[i][7]);
 }

-static int
-cast6_setkey(void *ctx, const u8 * in_key, unsigned key_len, u32 * flags)
+static int cast6_setkey(struct crypto_tfm *tfm, const u8 *in_key,
+                        unsigned key_len, u32 *flags)
 {
 int i;
 u32 key[8];
 __be32 p_key[8]; /* padded key */
-struct cast6_ctx *c = (struct cast6_ctx *) ctx;
+struct cast6_ctx *c = crypto_tfm_ctx(tfm);

 if (key_len < 16 || key_len > 32 || key_len % 4 != 0) {
 	*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
@@ -444,8 +444,9 @@ static inline void QBAR (u32 * block, u8 * Kr, u32 * Km) {
 block[2] ^= F1(block[3], Kr[0], Km[0]);
 }

-static void cast6_encrypt (void * ctx, u8 * outbuf, const u8 * inbuf) {
-struct cast6_ctx * c = (struct cast6_ctx *)ctx;
+static void cast6_encrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf)
+{
+struct cast6_ctx *c = crypto_tfm_ctx(tfm);
 const __be32 *src = (const __be32 *)inbuf;
 __be32 *dst = (__be32 *)outbuf;
 u32 block[4];
@@ -476,8 +477,8 @@ static void cast6_encrypt (void * ctx, u8 * outbuf, const u8 * inbuf) {
 dst[3] = cpu_to_be32(block[3]);
 }

-static void cast6_decrypt (void * ctx, u8 * outbuf, const u8 * inbuf) {
-struct cast6_ctx * c = (struct cast6_ctx *)ctx;
+static void cast6_decrypt(struct crypto_tfm *tfm, u8 *outbuf, const u8 *inbuf) {
+struct cast6_ctx * c = crypto_tfm_ctx(tfm);
 const __be32 *src = (const __be32 *)inbuf;
 __be32 *dst = (__be32 *)outbuf;
 u32 block[4];
diff --git a/crypto/cipher.c b/crypto/cipher.c
index 65bcea0cd17c..b899eb97abd7 100644
--- a/crypto/cipher.c
+++ b/crypto/cipher.c
@@ -187,7 +187,7 @@ static unsigned int cbc_process_encrypt(const struct cipher_desc *desc,
 void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
 int bsize = crypto_tfm_alg_blocksize(tfm);

-void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
+void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn;
 u8 *iv = desc->info;
 unsigned int done = 0;

@@ -195,7 +195,7 @@ static unsigned int cbc_process_encrypt(const struct cipher_desc *desc,

 do {
 	xor(iv, src);
-	fn(crypto_tfm_ctx(tfm), dst, iv);
+	fn(tfm, dst, iv);
 	memcpy(iv, dst, bsize);

 	src += bsize;
@@ -218,7 +218,7 @@ static unsigned int cbc_process_decrypt(const struct cipher_desc *desc,
 u8 *buf = (u8 *)ALIGN((unsigned long)stack, alignmask + 1);
 u8 **dst_p = src == dst ? &buf : &dst;

-void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
+void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn;
 u8 *iv = desc->info;
 unsigned int done = 0;

@@ -227,7 +227,7 @@ static unsigned int cbc_process_decrypt(const struct cipher_desc *desc,
 do {
 	u8 *tmp_dst = *dst_p;

-	fn(crypto_tfm_ctx(tfm), tmp_dst, src);
+	fn(tfm, tmp_dst, src);
 	xor(tmp_dst, iv);
 	memcpy(iv, src, bsize);
 	if (tmp_dst != dst)
@@ -245,13 +245,13 @@ static unsigned int ecb_process(const struct cipher_desc *desc, u8 *dst,
 {
 struct crypto_tfm *tfm = desc->tfm;
 int bsize = crypto_tfm_alg_blocksize(tfm);
-void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
+void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn;
 unsigned int done = 0;

 nbytes -= bsize;

 do {
-	fn(crypto_tfm_ctx(tfm), dst, src);
+	fn(tfm, dst, src);

 	src += bsize;
 	dst += bsize;
@@ -268,7 +268,7 @@ static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
 	tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
 	return -EINVAL;
 } else
-	return cia->cia_setkey(crypto_tfm_ctx(tfm), key, keylen,
+	return cia->cia_setkey(tfm, key, keylen,
 	                       &tfm->crt_flags);
 }

diff --git a/crypto/compress.c b/crypto/compress.c
index eb36d9364da3..eca182aa3380 100644
--- a/crypto/compress.c
+++ b/crypto/compress.c
@@ -22,8 +22,7 @@ static int crypto_compress(struct crypto_tfm *tfm,
                            const u8 *src, unsigned int slen,
                            u8 *dst, unsigned int *dlen)
 {
-return tfm->__crt_alg->cra_compress.coa_compress(crypto_tfm_ctx(tfm),
-                                                 src, slen, dst,
+return tfm->__crt_alg->cra_compress.coa_compress(tfm, src, slen, dst,
                                                  dlen);
 }

@@ -31,8 +30,7 @@ static int crypto_decompress(struct crypto_tfm *tfm,
                              const u8 *src, unsigned int slen,
                              u8 *dst, unsigned int *dlen)
 {
-return tfm->__crt_alg->cra_compress.coa_decompress(crypto_tfm_ctx(tfm),
-                                                   src, slen, dst,
+return tfm->__crt_alg->cra_compress.coa_decompress(tfm, src, slen, dst,
                                                    dlen);
 }

@@ -43,21 +41,14 @@ int crypto_init_compress_flags(struct crypto_tfm *tfm, u32 flags)

 int crypto_init_compress_ops(struct crypto_tfm *tfm)
 {
-int ret = 0;
 struct compress_tfm *ops = &tfm->crt_compress;
-
-ret = tfm->__crt_alg->cra_compress.coa_init(crypto_tfm_ctx(tfm));
-if (ret)
-	goto out;

 ops->cot_compress = crypto_compress;
 ops->cot_decompress = crypto_decompress;

-out:
-return ret;
+return 0;
 }

 void crypto_exit_compress_ops(struct crypto_tfm *tfm)
 {
-tfm->__crt_alg->cra_compress.coa_exit(crypto_tfm_ctx(tfm));
 }
diff --git a/crypto/crc32c.c b/crypto/crc32c.c
index 953362423a5c..f2660123aeb4 100644
--- a/crypto/crc32c.c
+++ b/crypto/crc32c.c
@@ -31,9 +31,9 @@ struct chksum_ctx {
  * crc using table.
  */

-static void chksum_init(void *ctx)
+static void chksum_init(struct crypto_tfm *tfm)
 {
-struct chksum_ctx *mctx = ctx;
+struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);

 mctx->crc = ~(u32)0; /* common usage */
 }
@@ -43,10 +43,10 @@ static void chksum_init(void *ctx)
  * If your algorithm starts with ~0, then XOR with ~0 before you set
  * the seed.
  */
-static int chksum_setkey(void *ctx, const u8 *key, unsigned int keylen,
-                         u32 *flags)
+static int chksum_setkey(struct crypto_tfm *tfm, const u8 *key,
+                         unsigned int keylen, u32 *flags)
 {
-struct chksum_ctx *mctx = ctx;
+struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);

 if (keylen != sizeof(mctx->crc)) {
 	if (flags)
@@ -57,9 +57,10 @@ static int chksum_setkey(void *ctx, const u8 *key, unsigned int keylen,
 return 0;
 }

-static void chksum_update(void *ctx, const u8 *data, unsigned int length)
+static void chksum_update(struct crypto_tfm *tfm, const u8 *data,
+                          unsigned int length)
 {
-struct chksum_ctx *mctx = ctx;
+struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
 u32 mcrc;

 mcrc = crc32c(mctx->crc, data, (size_t)length);
@@ -67,9 +68,9 @@ static void chksum_update(void *ctx, const u8 *data, unsigned int length)
 mctx->crc = mcrc;
 }

-static void chksum_final(void *ctx, u8 *out)
+static void chksum_final(struct crypto_tfm *tfm, u8 *out)
 {
-struct chksum_ctx *mctx = ctx;
+struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
 u32 mcrc = (mctx->crc ^ ~(u32)0);

 *(u32 *)out = __le32_to_cpu(mcrc);
diff --git a/crypto/crypto_null.c b/crypto/crypto_null.c
index 3fcf6e887e87..a0d956b52949 100644
--- a/crypto/crypto_null.c
+++ b/crypto/crypto_null.c
@@ -27,8 +27,8 @@
 #define NULL_BLOCK_SIZE 1
 #define NULL_DIGEST_SIZE 0

-static int null_compress(void *ctx, const u8 *src, unsigned int slen,
-                         u8 *dst, unsigned int *dlen)
+static int null_compress(struct crypto_tfm *tfm, const u8 *src,
+                         unsigned int slen, u8 *dst, unsigned int *dlen)
 {
 if (slen > *dlen)
 	return -EINVAL;
@@ -37,20 +37,21 @@ static int null_compress(void *ctx, const u8 *src, unsigned int slen,
 return 0;
 }

-static void null_init(void *ctx)
+static void null_init(struct crypto_tfm *tfm)
 { }

-static void null_update(void *ctx, const u8 *data, unsigned int len)
+static void null_update(struct crypto_tfm *tfm, const u8 *data,
+                        unsigned int len)
 { }

-static void null_final(void *ctx, u8 *out)
+static void null_final(struct crypto_tfm *tfm, u8 *out)
 { }

-static int null_setkey(void *ctx, const u8 *key,
+static int null_setkey(struct crypto_tfm *tfm, const u8 *key,
                        unsigned int keylen, u32 *flags)
 { return 0; }

-static void null_crypt(void *ctx, u8 *dst, const u8 *src)
+static void null_crypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
 memcpy(dst, src, NULL_BLOCK_SIZE);
 }
diff --git a/crypto/deflate.c b/crypto/deflate.c
index f209368d62ae..6588bbf82e9b 100644
--- a/crypto/deflate.c
+++ b/crypto/deflate.c
@@ -102,8 +102,9 @@ static void deflate_decomp_exit(struct deflate_ctx *ctx)
 kfree(ctx->decomp_stream.workspace);
 }

-static int deflate_init(void *ctx)
+static int deflate_init(struct crypto_tfm *tfm)
 {
+struct deflate_ctx *ctx = crypto_tfm_ctx(tfm);
 int ret;

 ret = deflate_comp_init(ctx);
@@ -116,17 +117,19 @@ out:
 return ret;
 }

-static void deflate_exit(void *ctx)
+static void deflate_exit(struct crypto_tfm *tfm)
 {
+struct deflate_ctx *ctx = crypto_tfm_ctx(tfm);
+
 deflate_comp_exit(ctx);
 deflate_decomp_exit(ctx);
 }

-static int deflate_compress(void *ctx, const u8 *src, unsigned int slen,
-                            u8 *dst, unsigned int *dlen)
+static int deflate_compress(struct crypto_tfm *tfm, const u8 *src,
+                            unsigned int slen, u8 *dst, unsigned int *dlen)
 {
 int ret = 0;
-struct deflate_ctx *dctx = ctx;
+struct deflate_ctx *dctx = crypto_tfm_ctx(tfm);
 struct z_stream_s *stream = &dctx->comp_stream;

 ret = zlib_deflateReset(stream);
@@ -151,12 +154,12 @@ out:
 return ret;
 }

-static int deflate_decompress(void *ctx, const u8 *src, unsigned int slen,
-                              u8 *dst, unsigned int *dlen)
+static int deflate_decompress(struct crypto_tfm *tfm, const u8 *src,
+                              unsigned int slen, u8 *dst, unsigned int *dlen)
 {

 int ret = 0;
-struct deflate_ctx *dctx = ctx;
+struct deflate_ctx *dctx = crypto_tfm_ctx(tfm);
 struct z_stream_s *stream = &dctx->decomp_stream;

 ret = zlib_inflateReset(stream);
@@ -198,9 +201,9 @@ static struct crypto_alg alg = {
 .cra_ctxsize = sizeof(struct deflate_ctx),
 .cra_module = THIS_MODULE,
 .cra_list = LIST_HEAD_INIT(alg.cra_list),
+.cra_init = deflate_init,
+.cra_exit = deflate_exit,
 .cra_u = { .compress = {
-.coa_init = deflate_init,
-.coa_exit = deflate_exit,
 .coa_compress = deflate_compress,
 .coa_decompress = deflate_decompress } }
 };
diff --git a/crypto/des.c b/crypto/des.c
index 2d74cab40c3e..a9d3c235a6af 100644
--- a/crypto/des.c
+++ b/crypto/des.c
@@ -783,9 +783,10 @@ static void dkey(u32 *pe, const u8 *k)
 }
 }

-static int des_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
+static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
+                      unsigned int keylen, u32 *flags)
 {
-struct des_ctx *dctx = ctx;
+struct des_ctx *dctx = crypto_tfm_ctx(tfm);
 u32 tmp[DES_EXPKEY_WORDS];
 int ret;

@@ -803,9 +804,10 @@ static int des_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags)
 return 0;
 }

-static void des_encrypt(void *ctx, u8 *dst, const u8 *src)
+static void des_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-const u32 *K = ((struct des_ctx *)ctx)->expkey;
+struct des_ctx *ctx = crypto_tfm_ctx(tfm);
+const u32 *K = ctx->expkey;
 const __le32 *s = (const __le32 *)src;
 __le32 *d = (__le32 *)dst;
 u32 L, R, A, B;
@@ -825,9 +827,10 @@ static void des_encrypt(void *ctx, u8 *dst, const u8 *src)
 d[1] = cpu_to_le32(L);
 }

-static void des_decrypt(void *ctx, u8 *dst, const u8 *src)
+static void des_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-const u32 *K = ((struct des_ctx *)ctx)->expkey + DES_EXPKEY_WORDS - 2;
+struct des_ctx *ctx = crypto_tfm_ctx(tfm);
+const u32 *K = ctx->expkey + DES_EXPKEY_WORDS - 2;
 const __le32 *s = (const __le32 *)src;
 __le32 *d = (__le32 *)dst;
 u32 L, R, A, B;
@@ -860,11 +863,11 @@ static void des_decrypt(void *ctx, u8 *dst, const u8 *src)
  * property.
  *
  */
-static int des3_ede_setkey(void *ctx, const u8 *key,
+static int des3_ede_setkey(struct crypto_tfm *tfm, const u8 *key,
                            unsigned int keylen, u32 *flags)
 {
 const u32 *K = (const u32 *)key;
-struct des3_ede_ctx *dctx = ctx;
+struct des3_ede_ctx *dctx = crypto_tfm_ctx(tfm);
 u32 *expkey = dctx->expkey;

 if (unlikely(!((K[0] ^ K[2]) | (K[1] ^ K[3])) ||
@@ -881,9 +884,9 @@ static int des3_ede_setkey(void *ctx, const u8 *key,
 return 0;
 }

-static void des3_ede_encrypt(void *ctx, u8 *dst, const u8 *src)
+static void des3_ede_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-struct des3_ede_ctx *dctx = ctx;
+struct des3_ede_ctx *dctx = crypto_tfm_ctx(tfm);
 const u32 *K = dctx->expkey;
 const __le32 *s = (const __le32 *)src;
 __le32 *d = (__le32 *)dst;
@@ -912,9 +915,9 @@ static void des3_ede_encrypt(void *ctx, u8 *dst, const u8 *src)
 d[1] = cpu_to_le32(L);
913 | } | 916 | } |
914 | 917 | ||
915 | static void des3_ede_decrypt(void *ctx, u8 *dst, const u8 *src) | 918 | static void des3_ede_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) |
916 | { | 919 | { |
917 | struct des3_ede_ctx *dctx = ctx; | 920 | struct des3_ede_ctx *dctx = crypto_tfm_ctx(tfm); |
918 | const u32 *K = dctx->expkey + DES3_EDE_EXPKEY_WORDS - 2; | 921 | const u32 *K = dctx->expkey + DES3_EDE_EXPKEY_WORDS - 2; |
919 | const __le32 *s = (const __le32 *)src; | 922 | const __le32 *s = (const __le32 *)src; |
920 | __le32 *d = (__le32 *)dst; | 923 | __le32 *d = (__le32 *)dst; |
diff --git a/crypto/digest.c b/crypto/digest.c index d9b6ac9dbf8d..603006a7bef2 100644 --- a/crypto/digest.c +++ b/crypto/digest.c | |||
@@ -20,13 +20,14 @@ | |||
20 | 20 | ||
21 | static void init(struct crypto_tfm *tfm) | 21 | static void init(struct crypto_tfm *tfm) |
22 | { | 22 | { |
23 | tfm->__crt_alg->cra_digest.dia_init(crypto_tfm_ctx(tfm)); | 23 | tfm->__crt_alg->cra_digest.dia_init(tfm); |
24 | } | 24 | } |
25 | 25 | ||
26 | static void update(struct crypto_tfm *tfm, | 26 | static void update(struct crypto_tfm *tfm, |
27 | struct scatterlist *sg, unsigned int nsg) | 27 | struct scatterlist *sg, unsigned int nsg) |
28 | { | 28 | { |
29 | unsigned int i; | 29 | unsigned int i; |
30 | unsigned int alignmask = crypto_tfm_alg_alignmask(tfm); | ||
30 | 31 | ||
31 | for (i = 0; i < nsg; i++) { | 32 | for (i = 0; i < nsg; i++) { |
32 | 33 | ||
@@ -38,12 +39,22 @@ static void update(struct crypto_tfm *tfm, | |||
38 | unsigned int bytes_from_page = min(l, ((unsigned int) | 39 | unsigned int bytes_from_page = min(l, ((unsigned int) |
39 | (PAGE_SIZE)) - | 40 | (PAGE_SIZE)) - |
40 | offset); | 41 | offset); |
41 | char *p = crypto_kmap(pg, 0) + offset; | 42 | char *src = crypto_kmap(pg, 0); |
43 | char *p = src + offset; | ||
42 | 44 | ||
43 | tfm->__crt_alg->cra_digest.dia_update | 45 | if (unlikely(offset & alignmask)) { |
44 | (crypto_tfm_ctx(tfm), p, | 46 | unsigned int bytes = |
45 | bytes_from_page); | 47 | alignmask + 1 - (offset & alignmask); |
46 | crypto_kunmap(p, 0); | 48 | bytes = min(bytes, bytes_from_page); |
49 | tfm->__crt_alg->cra_digest.dia_update(tfm, p, | ||
50 | bytes); | ||
51 | p += bytes; | ||
52 | bytes_from_page -= bytes; | ||
53 | l -= bytes; | ||
54 | } | ||
55 | tfm->__crt_alg->cra_digest.dia_update(tfm, p, | ||
56 | bytes_from_page); | ||
57 | crypto_kunmap(src, 0); | ||
47 | crypto_yield(tfm); | 58 | crypto_yield(tfm); |
48 | offset = 0; | 59 | offset = 0; |
49 | pg++; | 60 | pg++; |
@@ -54,7 +65,15 @@ static void update(struct crypto_tfm *tfm, | |||
54 | 65 | ||
55 | static void final(struct crypto_tfm *tfm, u8 *out) | 66 | static void final(struct crypto_tfm *tfm, u8 *out) |
56 | { | 67 | { |
57 | tfm->__crt_alg->cra_digest.dia_final(crypto_tfm_ctx(tfm), out); | 68 | unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); |
69 | if (unlikely((unsigned long)out & alignmask)) { | ||
70 | unsigned int size = crypto_tfm_alg_digestsize(tfm); | ||
71 | u8 buffer[size + alignmask]; | ||
72 | u8 *dst = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); | ||
73 | tfm->__crt_alg->cra_digest.dia_final(tfm, dst); | ||
74 | memcpy(out, dst, size); | ||
75 | } else | ||
76 | tfm->__crt_alg->cra_digest.dia_final(tfm, out); | ||
58 | } | 77 | } |
59 | 78 | ||
60 | static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) | 79 | static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) |
@@ -62,25 +81,15 @@ static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) | |||
62 | u32 flags; | 81 | u32 flags; |
63 | if (tfm->__crt_alg->cra_digest.dia_setkey == NULL) | 82 | if (tfm->__crt_alg->cra_digest.dia_setkey == NULL) |
64 | return -ENOSYS; | 83 | return -ENOSYS; |
65 | return tfm->__crt_alg->cra_digest.dia_setkey(crypto_tfm_ctx(tfm), | 84 | return tfm->__crt_alg->cra_digest.dia_setkey(tfm, key, keylen, &flags); |
66 | key, keylen, &flags); | ||
67 | } | 85 | } |
68 | 86 | ||
69 | static void digest(struct crypto_tfm *tfm, | 87 | static void digest(struct crypto_tfm *tfm, |
70 | struct scatterlist *sg, unsigned int nsg, u8 *out) | 88 | struct scatterlist *sg, unsigned int nsg, u8 *out) |
71 | { | 89 | { |
72 | unsigned int i; | 90 | init(tfm); |
73 | 91 | update(tfm, sg, nsg); | |
74 | tfm->crt_digest.dit_init(tfm); | 92 | final(tfm, out); |
75 | |||
76 | for (i = 0; i < nsg; i++) { | ||
77 | char *p = crypto_kmap(sg[i].page, 0) + sg[i].offset; | ||
78 | tfm->__crt_alg->cra_digest.dia_update(crypto_tfm_ctx(tfm), | ||
79 | p, sg[i].length); | ||
80 | crypto_kunmap(p, 0); | ||
81 | crypto_yield(tfm); | ||
82 | } | ||
83 | crypto_digest_final(tfm, out); | ||
84 | } | 93 | } |
85 | 94 | ||
86 | int crypto_init_digest_flags(struct crypto_tfm *tfm, u32 flags) | 95 | int crypto_init_digest_flags(struct crypto_tfm *tfm, u32 flags) |
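The digest.c hunk is where the new cra_alignmask field gets enforced: update() feeds dia_update just enough leading bytes to reach an (alignmask + 1)-byte boundary before handing over the rest of the chunk, and final() writes into an aligned on-stack buffer whenever the caller's output pointer is misaligned, copying the result out afterwards. A stand-alone sketch of the two address calculations, for illustration only:

/* Sketch of the head fix-up and the ALIGN() rounding used above. */
#include <stdio.h>

#define ALIGN_UP(x, a)	(((x) + (a) - 1) & ~((unsigned long)(a) - 1))

int main(void)
{
	unsigned long alignmask = 3;	/* cra_alignmask = 3 -> 4-byte alignment */
	unsigned long offset = 6;	/* scatterlist entry starts mid-word */

	/* update(): bytes to consume before the data becomes aligned */
	unsigned long head = alignmask + 1 - (offset & alignmask);
	printf("feed %lu unaligned byte(s); aligned run starts at offset %lu\n",
	       head, offset + head);

	/* final(): round a scratch address up to the required boundary */
	unsigned long buffer = 0x1002;
	printf("ALIGN(0x%lx, %lu) = 0x%lx\n",
	       buffer, alignmask + 1, ALIGN_UP(buffer, alignmask + 1));
	return 0;
}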
diff --git a/crypto/khazad.c b/crypto/khazad.c index 807f2bf4ea24..d4c9d3657b36 100644 --- a/crypto/khazad.c +++ b/crypto/khazad.c | |||
@@ -754,11 +754,11 @@ static const u64 c[KHAZAD_ROUNDS + 1] = { | |||
754 | 0xccc41d14c363da5dULL, 0x5fdc7dcd7f5a6c5cULL, 0xf726ffede89d6f8eULL | 754 | 0xccc41d14c363da5dULL, 0x5fdc7dcd7f5a6c5cULL, 0xf726ffede89d6f8eULL |
755 | }; | 755 | }; |
756 | 756 | ||
757 | static int khazad_setkey(void *ctx_arg, const u8 *in_key, | 757 | static int khazad_setkey(struct crypto_tfm *tfm, const u8 *in_key, |
758 | unsigned int key_len, u32 *flags) | 758 | unsigned int key_len, u32 *flags) |
759 | { | 759 | { |
760 | struct khazad_ctx *ctx = ctx_arg; | 760 | struct khazad_ctx *ctx = crypto_tfm_ctx(tfm); |
761 | const __be64 *key = (const __be64 *)in_key; | 761 | const __be32 *key = (const __be32 *)in_key; |
762 | int r; | 762 | int r; |
763 | const u64 *S = T7; | 763 | const u64 *S = T7; |
764 | u64 K2, K1; | 764 | u64 K2, K1; |
@@ -769,8 +769,9 @@ static int khazad_setkey(void *ctx_arg, const u8 *in_key, | |||
769 | return -EINVAL; | 769 | return -EINVAL; |
770 | } | 770 | } |
771 | 771 | ||
772 | K2 = be64_to_cpu(key[0]); | 772 | /* key is supposed to be 32-bit aligned */ |
773 | K1 = be64_to_cpu(key[1]); | 773 | K2 = ((u64)be32_to_cpu(key[0]) << 32) | be32_to_cpu(key[1]); |
774 | K1 = ((u64)be32_to_cpu(key[2]) << 32) | be32_to_cpu(key[3]); | ||
774 | 775 | ||
775 | /* setup the encrypt key */ | 776 | /* setup the encrypt key */ |
776 | for (r = 0; r <= KHAZAD_ROUNDS; r++) { | 777 | for (r = 0; r <= KHAZAD_ROUNDS; r++) { |
@@ -840,15 +841,15 @@ static void khazad_crypt(const u64 roundKey[KHAZAD_ROUNDS + 1], | |||
840 | *dst = cpu_to_be64(state); | 841 | *dst = cpu_to_be64(state); |
841 | } | 842 | } |
842 | 843 | ||
843 | static void khazad_encrypt(void *ctx_arg, u8 *dst, const u8 *src) | 844 | static void khazad_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) |
844 | { | 845 | { |
845 | struct khazad_ctx *ctx = ctx_arg; | 846 | struct khazad_ctx *ctx = crypto_tfm_ctx(tfm); |
846 | khazad_crypt(ctx->E, dst, src); | 847 | khazad_crypt(ctx->E, dst, src); |
847 | } | 848 | } |
848 | 849 | ||
849 | static void khazad_decrypt(void *ctx_arg, u8 *dst, const u8 *src) | 850 | static void khazad_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) |
850 | { | 851 | { |
851 | struct khazad_ctx *ctx = ctx_arg; | 852 | struct khazad_ctx *ctx = crypto_tfm_ctx(tfm); |
852 | khazad_crypt(ctx->D, dst, src); | 853 | khazad_crypt(ctx->D, dst, src); |
853 | } | 854 | } |
854 | 855 | ||
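The khazad change avoids a potentially unaligned 64-bit load: as the new comment notes, the key buffer is only assumed to be 32-bit aligned, so each 64-bit half of the key is assembled from two big-endian 32-bit words instead of a single be64 read. A quick stand-alone check of that composition, with demo values:

/* Sketch: compose a be64 key half from two aligned be32 reads,
 * mirroring the khazad_setkey change above. */
#include <stdio.h>
#include <stdint.h>
#include <string.h>
#include <arpa/inet.h>		/* ntohl() stands in for be32_to_cpu() */

int main(void)
{
	const uint8_t in_key[8] = { 0x01, 0x23, 0x45, 0x67,
				    0x89, 0xab, 0xcd, 0xef };
	uint32_t w[2];
	uint64_t K2;

	memcpy(w, in_key, sizeof(w));	/* two 32-bit loads, no 64-bit access */
	K2 = ((uint64_t)ntohl(w[0]) << 32) | ntohl(w[1]);

	printf("K2 = 0x%016llx\n", (unsigned long long)K2);
	/* prints 0x0123456789abcdef regardless of host endianness */
	return 0;
}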
diff --git a/crypto/md4.c b/crypto/md4.c index a2d6df5c0f8c..c1bc71bdc16b 100644 --- a/crypto/md4.c +++ b/crypto/md4.c | |||
@@ -152,9 +152,9 @@ static inline void md4_transform_helper(struct md4_ctx *ctx) | |||
152 | md4_transform(ctx->hash, ctx->block); | 152 | md4_transform(ctx->hash, ctx->block); |
153 | } | 153 | } |
154 | 154 | ||
155 | static void md4_init(void *ctx) | 155 | static void md4_init(struct crypto_tfm *tfm) |
156 | { | 156 | { |
157 | struct md4_ctx *mctx = ctx; | 157 | struct md4_ctx *mctx = crypto_tfm_ctx(tfm); |
158 | 158 | ||
159 | mctx->hash[0] = 0x67452301; | 159 | mctx->hash[0] = 0x67452301; |
160 | mctx->hash[1] = 0xefcdab89; | 160 | mctx->hash[1] = 0xefcdab89; |
@@ -163,9 +163,9 @@ static void md4_init(void *ctx) | |||
163 | mctx->byte_count = 0; | 163 | mctx->byte_count = 0; |
164 | } | 164 | } |
165 | 165 | ||
166 | static void md4_update(void *ctx, const u8 *data, unsigned int len) | 166 | static void md4_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len) |
167 | { | 167 | { |
168 | struct md4_ctx *mctx = ctx; | 168 | struct md4_ctx *mctx = crypto_tfm_ctx(tfm); |
169 | const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f); | 169 | const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f); |
170 | 170 | ||
171 | mctx->byte_count += len; | 171 | mctx->byte_count += len; |
@@ -193,9 +193,9 @@ static void md4_update(void *ctx, const u8 *data, unsigned int len) | |||
193 | memcpy(mctx->block, data, len); | 193 | memcpy(mctx->block, data, len); |
194 | } | 194 | } |
195 | 195 | ||
196 | static void md4_final(void *ctx, u8 *out) | 196 | static void md4_final(struct crypto_tfm *tfm, u8 *out) |
197 | { | 197 | { |
198 | struct md4_ctx *mctx = ctx; | 198 | struct md4_ctx *mctx = crypto_tfm_ctx(tfm); |
199 | const unsigned int offset = mctx->byte_count & 0x3f; | 199 | const unsigned int offset = mctx->byte_count & 0x3f; |
200 | char *p = (char *)mctx->block + offset; | 200 | char *p = (char *)mctx->block + offset; |
201 | int padding = 56 - (offset + 1); | 201 | int padding = 56 - (offset + 1); |
diff --git a/crypto/md5.c b/crypto/md5.c index 7f041aef5da2..93d18e8b3d53 100644 --- a/crypto/md5.c +++ b/crypto/md5.c | |||
@@ -147,9 +147,9 @@ static inline void md5_transform_helper(struct md5_ctx *ctx) | |||
147 | md5_transform(ctx->hash, ctx->block); | 147 | md5_transform(ctx->hash, ctx->block); |
148 | } | 148 | } |
149 | 149 | ||
150 | static void md5_init(void *ctx) | 150 | static void md5_init(struct crypto_tfm *tfm) |
151 | { | 151 | { |
152 | struct md5_ctx *mctx = ctx; | 152 | struct md5_ctx *mctx = crypto_tfm_ctx(tfm); |
153 | 153 | ||
154 | mctx->hash[0] = 0x67452301; | 154 | mctx->hash[0] = 0x67452301; |
155 | mctx->hash[1] = 0xefcdab89; | 155 | mctx->hash[1] = 0xefcdab89; |
@@ -158,9 +158,9 @@ static void md5_init(void *ctx) | |||
158 | mctx->byte_count = 0; | 158 | mctx->byte_count = 0; |
159 | } | 159 | } |
160 | 160 | ||
161 | static void md5_update(void *ctx, const u8 *data, unsigned int len) | 161 | static void md5_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len) |
162 | { | 162 | { |
163 | struct md5_ctx *mctx = ctx; | 163 | struct md5_ctx *mctx = crypto_tfm_ctx(tfm); |
164 | const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f); | 164 | const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f); |
165 | 165 | ||
166 | mctx->byte_count += len; | 166 | mctx->byte_count += len; |
@@ -188,9 +188,9 @@ static void md5_update(void *ctx, const u8 *data, unsigned int len) | |||
188 | memcpy(mctx->block, data, len); | 188 | memcpy(mctx->block, data, len); |
189 | } | 189 | } |
190 | 190 | ||
191 | static void md5_final(void *ctx, u8 *out) | 191 | static void md5_final(struct crypto_tfm *tfm, u8 *out) |
192 | { | 192 | { |
193 | struct md5_ctx *mctx = ctx; | 193 | struct md5_ctx *mctx = crypto_tfm_ctx(tfm); |
194 | const unsigned int offset = mctx->byte_count & 0x3f; | 194 | const unsigned int offset = mctx->byte_count & 0x3f; |
195 | char *p = (char *)mctx->block + offset; | 195 | char *p = (char *)mctx->block + offset; |
196 | int padding = 56 - (offset + 1); | 196 | int padding = 56 - (offset + 1); |
diff --git a/crypto/michael_mic.c b/crypto/michael_mic.c index 4f6ab23e14ad..d061da21cfda 100644 --- a/crypto/michael_mic.c +++ b/crypto/michael_mic.c | |||
@@ -45,16 +45,17 @@ do { \ | |||
45 | } while (0) | 45 | } while (0) |
46 | 46 | ||
47 | 47 | ||
48 | static void michael_init(void *ctx) | 48 | static void michael_init(struct crypto_tfm *tfm) |
49 | { | 49 | { |
50 | struct michael_mic_ctx *mctx = ctx; | 50 | struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm); |
51 | mctx->pending_len = 0; | 51 | mctx->pending_len = 0; |
52 | } | 52 | } |
53 | 53 | ||
54 | 54 | ||
55 | static void michael_update(void *ctx, const u8 *data, unsigned int len) | 55 | static void michael_update(struct crypto_tfm *tfm, const u8 *data, |
56 | unsigned int len) | ||
56 | { | 57 | { |
57 | struct michael_mic_ctx *mctx = ctx; | 58 | struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm); |
58 | const __le32 *src; | 59 | const __le32 *src; |
59 | 60 | ||
60 | if (mctx->pending_len) { | 61 | if (mctx->pending_len) { |
@@ -90,9 +91,9 @@ static void michael_update(void *ctx, const u8 *data, unsigned int len) | |||
90 | } | 91 | } |
91 | 92 | ||
92 | 93 | ||
93 | static void michael_final(void *ctx, u8 *out) | 94 | static void michael_final(struct crypto_tfm *tfm, u8 *out) |
94 | { | 95 | { |
95 | struct michael_mic_ctx *mctx = ctx; | 96 | struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm); |
96 | u8 *data = mctx->pending; | 97 | u8 *data = mctx->pending; |
97 | __le32 *dst = (__le32 *)out; | 98 | __le32 *dst = (__le32 *)out; |
98 | 99 | ||
@@ -121,10 +122,10 @@ static void michael_final(void *ctx, u8 *out) | |||
121 | } | 122 | } |
122 | 123 | ||
123 | 124 | ||
124 | static int michael_setkey(void *ctx, const u8 *key, unsigned int keylen, | 125 | static int michael_setkey(struct crypto_tfm *tfm, const u8 *key, |
125 | u32 *flags) | 126 | unsigned int keylen, u32 *flags) |
126 | { | 127 | { |
127 | struct michael_mic_ctx *mctx = ctx; | 128 | struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm); |
128 | const __le32 *data = (const __le32 *)key; | 129 | const __le32 *data = (const __le32 *)key; |
129 | 130 | ||
130 | if (keylen != 8) { | 131 | if (keylen != 8) { |
@@ -145,6 +146,7 @@ static struct crypto_alg michael_mic_alg = { | |||
145 | .cra_blocksize = 8, | 146 | .cra_blocksize = 8, |
146 | .cra_ctxsize = sizeof(struct michael_mic_ctx), | 147 | .cra_ctxsize = sizeof(struct michael_mic_ctx), |
147 | .cra_module = THIS_MODULE, | 148 | .cra_module = THIS_MODULE, |
149 | .cra_alignmask = 3, | ||
148 | .cra_list = LIST_HEAD_INIT(michael_mic_alg.cra_list), | 150 | .cra_list = LIST_HEAD_INIT(michael_mic_alg.cra_list), |
149 | .cra_u = { .digest = { | 151 | .cra_u = { .digest = { |
150 | .dia_digestsize = 8, | 152 | .dia_digestsize = 8, |
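Setting .cra_alignmask = 3 ties this digest into the alignment handling added to crypto/digest.c above: at most three leading bytes of a misaligned chunk are fed separately, and final() substitutes an aligned scratch buffer for a misaligned output pointer, so the 32-bit loads and stores in michael_update() and michael_final() land on 4-byte boundaries. The mask is simply the required alignment minus one (the tgr192 hunks further down use 7 for 8-byte alignment); a trivial stand-alone check:

/* Sketch: cra_alignmask semantics -- a mask of 3 means the low two
 * address bits must be zero, i.e. 4-byte alignment. */
#include <stdio.h>
#include <stdint.h>

static int is_aligned(const void *p, unsigned long alignmask)
{
	return ((uintptr_t)p & alignmask) == 0;
}

int main(void)
{
	uint32_t words[2];

	printf("&words[0] ok for mask 3: %d\n", is_aligned(&words[0], 3));
	printf("&words[0] + 1 ok for mask 3: %d\n",
	       is_aligned((const char *)&words[0] + 1, 3));
	return 0;
}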
diff --git a/crypto/serpent.c b/crypto/serpent.c index e366406ab49d..de60cdddbf4a 100644 --- a/crypto/serpent.c +++ b/crypto/serpent.c | |||
@@ -215,9 +215,11 @@ struct serpent_ctx { | |||
215 | }; | 215 | }; |
216 | 216 | ||
217 | 217 | ||
218 | static int serpent_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags) | 218 | static int serpent_setkey(struct crypto_tfm *tfm, const u8 *key, |
219 | unsigned int keylen, u32 *flags) | ||
219 | { | 220 | { |
220 | u32 *k = ((struct serpent_ctx *)ctx)->expkey; | 221 | struct serpent_ctx *ctx = crypto_tfm_ctx(tfm); |
222 | u32 *k = ctx->expkey; | ||
221 | u8 *k8 = (u8 *)k; | 223 | u8 *k8 = (u8 *)k; |
222 | u32 r0,r1,r2,r3,r4; | 224 | u32 r0,r1,r2,r3,r4; |
223 | int i; | 225 | int i; |
@@ -365,10 +367,11 @@ static int serpent_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *fl | |||
365 | return 0; | 367 | return 0; |
366 | } | 368 | } |
367 | 369 | ||
368 | static void serpent_encrypt(void *ctx, u8 *dst, const u8 *src) | 370 | static void serpent_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) |
369 | { | 371 | { |
372 | struct serpent_ctx *ctx = crypto_tfm_ctx(tfm); | ||
370 | const u32 | 373 | const u32 |
371 | *k = ((struct serpent_ctx *)ctx)->expkey, | 374 | *k = ctx->expkey, |
372 | *s = (const u32 *)src; | 375 | *s = (const u32 *)src; |
373 | u32 *d = (u32 *)dst, | 376 | u32 *d = (u32 *)dst, |
374 | r0, r1, r2, r3, r4; | 377 | r0, r1, r2, r3, r4; |
@@ -423,8 +426,9 @@ static void serpent_encrypt(void *ctx, u8 *dst, const u8 *src) | |||
423 | d[3] = cpu_to_le32(r3); | 426 | d[3] = cpu_to_le32(r3); |
424 | } | 427 | } |
425 | 428 | ||
426 | static void serpent_decrypt(void *ctx, u8 *dst, const u8 *src) | 429 | static void serpent_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) |
427 | { | 430 | { |
431 | struct serpent_ctx *ctx = crypto_tfm_ctx(tfm); | ||
428 | const u32 | 432 | const u32 |
429 | *k = ((struct serpent_ctx *)ctx)->expkey, | 433 | *k = ((struct serpent_ctx *)ctx)->expkey, |
430 | *s = (const u32 *)src; | 434 | *s = (const u32 *)src; |
@@ -492,7 +496,8 @@ static struct crypto_alg serpent_alg = { | |||
492 | .cia_decrypt = serpent_decrypt } } | 496 | .cia_decrypt = serpent_decrypt } } |
493 | }; | 497 | }; |
494 | 498 | ||
495 | static int tnepres_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *flags) | 499 | static int tnepres_setkey(struct crypto_tfm *tfm, const u8 *key, |
500 | unsigned int keylen, u32 *flags) | ||
496 | { | 501 | { |
497 | u8 rev_key[SERPENT_MAX_KEY_SIZE]; | 502 | u8 rev_key[SERPENT_MAX_KEY_SIZE]; |
498 | int i; | 503 | int i; |
@@ -506,10 +511,10 @@ static int tnepres_setkey(void *ctx, const u8 *key, unsigned int keylen, u32 *fl | |||
506 | for (i = 0; i < keylen; ++i) | 511 | for (i = 0; i < keylen; ++i) |
507 | rev_key[keylen - i - 1] = key[i]; | 512 | rev_key[keylen - i - 1] = key[i]; |
508 | 513 | ||
509 | return serpent_setkey(ctx, rev_key, keylen, flags); | 514 | return serpent_setkey(tfm, rev_key, keylen, flags); |
510 | } | 515 | } |
511 | 516 | ||
512 | static void tnepres_encrypt(void *ctx, u8 *dst, const u8 *src) | 517 | static void tnepres_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) |
513 | { | 518 | { |
514 | const u32 * const s = (const u32 * const)src; | 519 | const u32 * const s = (const u32 * const)src; |
515 | u32 * const d = (u32 * const)dst; | 520 | u32 * const d = (u32 * const)dst; |
@@ -521,7 +526,7 @@ static void tnepres_encrypt(void *ctx, u8 *dst, const u8 *src) | |||
521 | rs[2] = swab32(s[1]); | 526 | rs[2] = swab32(s[1]); |
522 | rs[3] = swab32(s[0]); | 527 | rs[3] = swab32(s[0]); |
523 | 528 | ||
524 | serpent_encrypt(ctx, (u8 *)rd, (u8 *)rs); | 529 | serpent_encrypt(tfm, (u8 *)rd, (u8 *)rs); |
525 | 530 | ||
526 | d[0] = swab32(rd[3]); | 531 | d[0] = swab32(rd[3]); |
527 | d[1] = swab32(rd[2]); | 532 | d[1] = swab32(rd[2]); |
@@ -529,7 +534,7 @@ static void tnepres_encrypt(void *ctx, u8 *dst, const u8 *src) | |||
529 | d[3] = swab32(rd[0]); | 534 | d[3] = swab32(rd[0]); |
530 | } | 535 | } |
531 | 536 | ||
532 | static void tnepres_decrypt(void *ctx, u8 *dst, const u8 *src) | 537 | static void tnepres_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) |
533 | { | 538 | { |
534 | const u32 * const s = (const u32 * const)src; | 539 | const u32 * const s = (const u32 * const)src; |
535 | u32 * const d = (u32 * const)dst; | 540 | u32 * const d = (u32 * const)dst; |
@@ -541,7 +546,7 @@ static void tnepres_decrypt(void *ctx, u8 *dst, const u8 *src) | |||
541 | rs[2] = swab32(s[1]); | 546 | rs[2] = swab32(s[1]); |
542 | rs[3] = swab32(s[0]); | 547 | rs[3] = swab32(s[0]); |
543 | 548 | ||
544 | serpent_decrypt(ctx, (u8 *)rd, (u8 *)rs); | 549 | serpent_decrypt(tfm, (u8 *)rd, (u8 *)rs); |
545 | 550 | ||
546 | d[0] = swab32(rd[3]); | 551 | d[0] = swab32(rd[3]); |
547 | d[1] = swab32(rd[2]); | 552 | d[1] = swab32(rd[2]); |
diff --git a/crypto/sha1.c b/crypto/sha1.c index 21571ed35b7e..6c77b689f87e 100644 --- a/crypto/sha1.c +++ b/crypto/sha1.c | |||
@@ -34,9 +34,9 @@ struct sha1_ctx { | |||
34 | u8 buffer[64]; | 34 | u8 buffer[64]; |
35 | }; | 35 | }; |
36 | 36 | ||
37 | static void sha1_init(void *ctx) | 37 | static void sha1_init(struct crypto_tfm *tfm) |
38 | { | 38 | { |
39 | struct sha1_ctx *sctx = ctx; | 39 | struct sha1_ctx *sctx = crypto_tfm_ctx(tfm); |
40 | static const struct sha1_ctx initstate = { | 40 | static const struct sha1_ctx initstate = { |
41 | 0, | 41 | 0, |
42 | { 0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476, 0xC3D2E1F0 }, | 42 | { 0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476, 0xC3D2E1F0 }, |
@@ -46,9 +46,10 @@ static void sha1_init(void *ctx) | |||
46 | *sctx = initstate; | 46 | *sctx = initstate; |
47 | } | 47 | } |
48 | 48 | ||
49 | static void sha1_update(void *ctx, const u8 *data, unsigned int len) | 49 | static void sha1_update(struct crypto_tfm *tfm, const u8 *data, |
50 | unsigned int len) | ||
50 | { | 51 | { |
51 | struct sha1_ctx *sctx = ctx; | 52 | struct sha1_ctx *sctx = crypto_tfm_ctx(tfm); |
52 | unsigned int partial, done; | 53 | unsigned int partial, done; |
53 | const u8 *src; | 54 | const u8 *src; |
54 | 55 | ||
@@ -80,9 +81,9 @@ static void sha1_update(void *ctx, const u8 *data, unsigned int len) | |||
80 | 81 | ||
81 | 82 | ||
82 | /* Add padding and return the message digest. */ | 83 | /* Add padding and return the message digest. */ |
83 | static void sha1_final(void* ctx, u8 *out) | 84 | static void sha1_final(struct crypto_tfm *tfm, u8 *out) |
84 | { | 85 | { |
85 | struct sha1_ctx *sctx = ctx; | 86 | struct sha1_ctx *sctx = crypto_tfm_ctx(tfm); |
86 | __be32 *dst = (__be32 *)out; | 87 | __be32 *dst = (__be32 *)out; |
87 | u32 i, index, padlen; | 88 | u32 i, index, padlen; |
88 | __be64 bits; | 89 | __be64 bits; |
@@ -93,10 +94,10 @@ static void sha1_final(void* ctx, u8 *out) | |||
93 | /* Pad out to 56 mod 64 */ | 94 | /* Pad out to 56 mod 64 */ |
94 | index = sctx->count & 0x3f; | 95 | index = sctx->count & 0x3f; |
95 | padlen = (index < 56) ? (56 - index) : ((64+56) - index); | 96 | padlen = (index < 56) ? (56 - index) : ((64+56) - index); |
96 | sha1_update(sctx, padding, padlen); | 97 | sha1_update(tfm, padding, padlen); |
97 | 98 | ||
98 | /* Append length */ | 99 | /* Append length */ |
99 | sha1_update(sctx, (const u8 *)&bits, sizeof(bits)); | 100 | sha1_update(tfm, (const u8 *)&bits, sizeof(bits)); |
100 | 101 | ||
101 | /* Store state in digest */ | 102 | /* Store state in digest */ |
102 | for (i = 0; i < 5; i++) | 103 | for (i = 0; i < 5; i++) |
@@ -112,6 +113,7 @@ static struct crypto_alg alg = { | |||
112 | .cra_blocksize = SHA1_HMAC_BLOCK_SIZE, | 113 | .cra_blocksize = SHA1_HMAC_BLOCK_SIZE, |
113 | .cra_ctxsize = sizeof(struct sha1_ctx), | 114 | .cra_ctxsize = sizeof(struct sha1_ctx), |
114 | .cra_module = THIS_MODULE, | 115 | .cra_module = THIS_MODULE, |
116 | .cra_alignmask = 3, | ||
115 | .cra_list = LIST_HEAD_INIT(alg.cra_list), | 117 | .cra_list = LIST_HEAD_INIT(alg.cra_list), |
116 | .cra_u = { .digest = { | 118 | .cra_u = { .digest = { |
117 | .dia_digestsize = SHA1_DIGEST_SIZE, | 119 | .dia_digestsize = SHA1_DIGEST_SIZE, |
diff --git a/crypto/sha256.c b/crypto/sha256.c index 9d5ef674d6a9..bc71d85a7d02 100644 --- a/crypto/sha256.c +++ b/crypto/sha256.c | |||
@@ -230,9 +230,9 @@ static void sha256_transform(u32 *state, const u8 *input) | |||
230 | memset(W, 0, 64 * sizeof(u32)); | 230 | memset(W, 0, 64 * sizeof(u32)); |
231 | } | 231 | } |
232 | 232 | ||
233 | static void sha256_init(void *ctx) | 233 | static void sha256_init(struct crypto_tfm *tfm) |
234 | { | 234 | { |
235 | struct sha256_ctx *sctx = ctx; | 235 | struct sha256_ctx *sctx = crypto_tfm_ctx(tfm); |
236 | sctx->state[0] = H0; | 236 | sctx->state[0] = H0; |
237 | sctx->state[1] = H1; | 237 | sctx->state[1] = H1; |
238 | sctx->state[2] = H2; | 238 | sctx->state[2] = H2; |
@@ -242,12 +242,12 @@ static void sha256_init(void *ctx) | |||
242 | sctx->state[6] = H6; | 242 | sctx->state[6] = H6; |
243 | sctx->state[7] = H7; | 243 | sctx->state[7] = H7; |
244 | sctx->count[0] = sctx->count[1] = 0; | 244 | sctx->count[0] = sctx->count[1] = 0; |
245 | memset(sctx->buf, 0, sizeof(sctx->buf)); | ||
246 | } | 245 | } |
247 | 246 | ||
248 | static void sha256_update(void *ctx, const u8 *data, unsigned int len) | 247 | static void sha256_update(struct crypto_tfm *tfm, const u8 *data, |
248 | unsigned int len) | ||
249 | { | 249 | { |
250 | struct sha256_ctx *sctx = ctx; | 250 | struct sha256_ctx *sctx = crypto_tfm_ctx(tfm); |
251 | unsigned int i, index, part_len; | 251 | unsigned int i, index, part_len; |
252 | 252 | ||
253 | /* Compute number of bytes mod 128 */ | 253 | /* Compute number of bytes mod 128 */ |
@@ -277,9 +277,9 @@ static void sha256_update(void *ctx, const u8 *data, unsigned int len) | |||
277 | memcpy(&sctx->buf[index], &data[i], len-i); | 277 | memcpy(&sctx->buf[index], &data[i], len-i); |
278 | } | 278 | } |
279 | 279 | ||
280 | static void sha256_final(void* ctx, u8 *out) | 280 | static void sha256_final(struct crypto_tfm *tfm, u8 *out) |
281 | { | 281 | { |
282 | struct sha256_ctx *sctx = ctx; | 282 | struct sha256_ctx *sctx = crypto_tfm_ctx(tfm); |
283 | __be32 *dst = (__be32 *)out; | 283 | __be32 *dst = (__be32 *)out; |
284 | __be32 bits[2]; | 284 | __be32 bits[2]; |
285 | unsigned int index, pad_len; | 285 | unsigned int index, pad_len; |
@@ -293,10 +293,10 @@ static void sha256_final(void* ctx, u8 *out) | |||
293 | /* Pad out to 56 mod 64. */ | 293 | /* Pad out to 56 mod 64. */ |
294 | index = (sctx->count[0] >> 3) & 0x3f; | 294 | index = (sctx->count[0] >> 3) & 0x3f; |
295 | pad_len = (index < 56) ? (56 - index) : ((64+56) - index); | 295 | pad_len = (index < 56) ? (56 - index) : ((64+56) - index); |
296 | sha256_update(sctx, padding, pad_len); | 296 | sha256_update(tfm, padding, pad_len); |
297 | 297 | ||
298 | /* Append length (before padding) */ | 298 | /* Append length (before padding) */ |
299 | sha256_update(sctx, (const u8 *)bits, sizeof(bits)); | 299 | sha256_update(tfm, (const u8 *)bits, sizeof(bits)); |
300 | 300 | ||
301 | /* Store state in digest */ | 301 | /* Store state in digest */ |
302 | for (i = 0; i < 8; i++) | 302 | for (i = 0; i < 8; i++) |
@@ -313,6 +313,7 @@ static struct crypto_alg alg = { | |||
313 | .cra_blocksize = SHA256_HMAC_BLOCK_SIZE, | 313 | .cra_blocksize = SHA256_HMAC_BLOCK_SIZE, |
314 | .cra_ctxsize = sizeof(struct sha256_ctx), | 314 | .cra_ctxsize = sizeof(struct sha256_ctx), |
315 | .cra_module = THIS_MODULE, | 315 | .cra_module = THIS_MODULE, |
316 | .cra_alignmask = 3, | ||
316 | .cra_list = LIST_HEAD_INIT(alg.cra_list), | 317 | .cra_list = LIST_HEAD_INIT(alg.cra_list), |
317 | .cra_u = { .digest = { | 318 | .cra_u = { .digest = { |
318 | .dia_digestsize = SHA256_DIGEST_SIZE, | 319 | .dia_digestsize = SHA256_DIGEST_SIZE, |
diff --git a/crypto/sha512.c b/crypto/sha512.c index 3e6e9392310c..2dfe7f170b48 100644 --- a/crypto/sha512.c +++ b/crypto/sha512.c | |||
@@ -161,9 +161,9 @@ sha512_transform(u64 *state, u64 *W, const u8 *input) | |||
161 | } | 161 | } |
162 | 162 | ||
163 | static void | 163 | static void |
164 | sha512_init(void *ctx) | 164 | sha512_init(struct crypto_tfm *tfm) |
165 | { | 165 | { |
166 | struct sha512_ctx *sctx = ctx; | 166 | struct sha512_ctx *sctx = crypto_tfm_ctx(tfm); |
167 | sctx->state[0] = H0; | 167 | sctx->state[0] = H0; |
168 | sctx->state[1] = H1; | 168 | sctx->state[1] = H1; |
169 | sctx->state[2] = H2; | 169 | sctx->state[2] = H2; |
@@ -173,13 +173,12 @@ sha512_init(void *ctx) | |||
173 | sctx->state[6] = H6; | 173 | sctx->state[6] = H6; |
174 | sctx->state[7] = H7; | 174 | sctx->state[7] = H7; |
175 | sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0; | 175 | sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0; |
176 | memset(sctx->buf, 0, sizeof(sctx->buf)); | ||
177 | } | 176 | } |
178 | 177 | ||
179 | static void | 178 | static void |
180 | sha384_init(void *ctx) | 179 | sha384_init(struct crypto_tfm *tfm) |
181 | { | 180 | { |
182 | struct sha512_ctx *sctx = ctx; | 181 | struct sha512_ctx *sctx = crypto_tfm_ctx(tfm); |
183 | sctx->state[0] = HP0; | 182 | sctx->state[0] = HP0; |
184 | sctx->state[1] = HP1; | 183 | sctx->state[1] = HP1; |
185 | sctx->state[2] = HP2; | 184 | sctx->state[2] = HP2; |
@@ -189,13 +188,12 @@ sha384_init(void *ctx) | |||
189 | sctx->state[6] = HP6; | 188 | sctx->state[6] = HP6; |
190 | sctx->state[7] = HP7; | 189 | sctx->state[7] = HP7; |
191 | sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0; | 190 | sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0; |
192 | memset(sctx->buf, 0, sizeof(sctx->buf)); | ||
193 | } | 191 | } |
194 | 192 | ||
195 | static void | 193 | static void |
196 | sha512_update(void *ctx, const u8 *data, unsigned int len) | 194 | sha512_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len) |
197 | { | 195 | { |
198 | struct sha512_ctx *sctx = ctx; | 196 | struct sha512_ctx *sctx = crypto_tfm_ctx(tfm); |
199 | 197 | ||
200 | unsigned int i, index, part_len; | 198 | unsigned int i, index, part_len; |
201 | 199 | ||
@@ -233,9 +231,9 @@ sha512_update(void *ctx, const u8 *data, unsigned int len) | |||
233 | } | 231 | } |
234 | 232 | ||
235 | static void | 233 | static void |
236 | sha512_final(void *ctx, u8 *hash) | 234 | sha512_final(struct crypto_tfm *tfm, u8 *hash) |
237 | { | 235 | { |
238 | struct sha512_ctx *sctx = ctx; | 236 | struct sha512_ctx *sctx = crypto_tfm_ctx(tfm); |
239 | static u8 padding[128] = { 0x80, }; | 237 | static u8 padding[128] = { 0x80, }; |
240 | __be64 *dst = (__be64 *)hash; | 238 | __be64 *dst = (__be64 *)hash; |
241 | __be32 bits[4]; | 239 | __be32 bits[4]; |
@@ -251,10 +249,10 @@ sha512_final(void *ctx, u8 *hash) | |||
251 | /* Pad out to 112 mod 128. */ | 249 | /* Pad out to 112 mod 128. */ |
252 | index = (sctx->count[0] >> 3) & 0x7f; | 250 | index = (sctx->count[0] >> 3) & 0x7f; |
253 | pad_len = (index < 112) ? (112 - index) : ((128+112) - index); | 251 | pad_len = (index < 112) ? (112 - index) : ((128+112) - index); |
254 | sha512_update(sctx, padding, pad_len); | 252 | sha512_update(tfm, padding, pad_len); |
255 | 253 | ||
256 | /* Append length (before padding) */ | 254 | /* Append length (before padding) */ |
257 | sha512_update(sctx, (const u8 *)bits, sizeof(bits)); | 255 | sha512_update(tfm, (const u8 *)bits, sizeof(bits)); |
258 | 256 | ||
259 | /* Store state in digest */ | 257 | /* Store state in digest */ |
260 | for (i = 0; i < 8; i++) | 258 | for (i = 0; i < 8; i++) |
@@ -264,12 +262,11 @@ sha512_final(void *ctx, u8 *hash) | |||
264 | memset(sctx, 0, sizeof(struct sha512_ctx)); | 262 | memset(sctx, 0, sizeof(struct sha512_ctx)); |
265 | } | 263 | } |
266 | 264 | ||
267 | static void sha384_final(void *ctx, u8 *hash) | 265 | static void sha384_final(struct crypto_tfm *tfm, u8 *hash) |
268 | { | 266 | { |
269 | struct sha512_ctx *sctx = ctx; | ||
270 | u8 D[64]; | 267 | u8 D[64]; |
271 | 268 | ||
272 | sha512_final(sctx, D); | 269 | sha512_final(tfm, D); |
273 | 270 | ||
274 | memcpy(hash, D, 48); | 271 | memcpy(hash, D, 48); |
275 | memset(D, 0, 64); | 272 | memset(D, 0, 64); |
@@ -281,6 +278,7 @@ static struct crypto_alg sha512 = { | |||
281 | .cra_blocksize = SHA512_HMAC_BLOCK_SIZE, | 278 | .cra_blocksize = SHA512_HMAC_BLOCK_SIZE, |
282 | .cra_ctxsize = sizeof(struct sha512_ctx), | 279 | .cra_ctxsize = sizeof(struct sha512_ctx), |
283 | .cra_module = THIS_MODULE, | 280 | .cra_module = THIS_MODULE, |
281 | .cra_alignmask = 3, | ||
284 | .cra_list = LIST_HEAD_INIT(sha512.cra_list), | 282 | .cra_list = LIST_HEAD_INIT(sha512.cra_list), |
285 | .cra_u = { .digest = { | 283 | .cra_u = { .digest = { |
286 | .dia_digestsize = SHA512_DIGEST_SIZE, | 284 | .dia_digestsize = SHA512_DIGEST_SIZE, |
@@ -295,6 +293,7 @@ static struct crypto_alg sha384 = { | |||
295 | .cra_flags = CRYPTO_ALG_TYPE_DIGEST, | 293 | .cra_flags = CRYPTO_ALG_TYPE_DIGEST, |
296 | .cra_blocksize = SHA384_HMAC_BLOCK_SIZE, | 294 | .cra_blocksize = SHA384_HMAC_BLOCK_SIZE, |
297 | .cra_ctxsize = sizeof(struct sha512_ctx), | 295 | .cra_ctxsize = sizeof(struct sha512_ctx), |
296 | .cra_alignmask = 3, | ||
298 | .cra_module = THIS_MODULE, | 297 | .cra_module = THIS_MODULE, |
299 | .cra_list = LIST_HEAD_INIT(sha384.cra_list), | 298 | .cra_list = LIST_HEAD_INIT(sha384.cra_list), |
300 | .cra_u = { .digest = { | 299 | .cra_u = { .digest = { |
diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c index 49e344f00806..e52f56c5bd5e 100644 --- a/crypto/tcrypt.c +++ b/crypto/tcrypt.c | |||
@@ -570,6 +570,122 @@ out: | |||
570 | crypto_free_tfm(tfm); | 570 | crypto_free_tfm(tfm); |
571 | } | 571 | } |
572 | 572 | ||
573 | static void test_digest_jiffies(struct crypto_tfm *tfm, char *p, int blen, | ||
574 | int plen, char *out, int sec) | ||
575 | { | ||
576 | struct scatterlist sg[1]; | ||
577 | unsigned long start, end; | ||
578 | int bcount, pcount; | ||
579 | |||
580 | for (start = jiffies, end = start + sec * HZ, bcount = 0; | ||
581 | time_before(jiffies, end); bcount++) { | ||
582 | crypto_digest_init(tfm); | ||
583 | for (pcount = 0; pcount < blen; pcount += plen) { | ||
584 | sg_set_buf(sg, p + pcount, plen); | ||
585 | crypto_digest_update(tfm, sg, 1); | ||
586 | } | ||
587 | /* we assume there is enough space in 'out' for the result */ | ||
588 | crypto_digest_final(tfm, out); | ||
589 | } | ||
590 | |||
591 | printk("%6u opers/sec, %9lu bytes/sec\n", | ||
592 | bcount / sec, ((long)bcount * blen) / sec); | ||
593 | |||
594 | return; | ||
595 | } | ||
596 | |||
597 | static void test_digest_cycles(struct crypto_tfm *tfm, char *p, int blen, | ||
598 | int plen, char *out) | ||
599 | { | ||
600 | struct scatterlist sg[1]; | ||
601 | unsigned long cycles = 0; | ||
602 | int i, pcount; | ||
603 | |||
604 | local_bh_disable(); | ||
605 | local_irq_disable(); | ||
606 | |||
607 | /* Warm-up run. */ | ||
608 | for (i = 0; i < 4; i++) { | ||
609 | crypto_digest_init(tfm); | ||
610 | for (pcount = 0; pcount < blen; pcount += plen) { | ||
611 | sg_set_buf(sg, p + pcount, plen); | ||
612 | crypto_digest_update(tfm, sg, 1); | ||
613 | } | ||
614 | crypto_digest_final(tfm, out); | ||
615 | } | ||
616 | |||
617 | /* The real thing. */ | ||
618 | for (i = 0; i < 8; i++) { | ||
619 | cycles_t start, end; | ||
620 | |||
621 | crypto_digest_init(tfm); | ||
622 | |||
623 | start = get_cycles(); | ||
624 | |||
625 | for (pcount = 0; pcount < blen; pcount += plen) { | ||
626 | sg_set_buf(sg, p + pcount, plen); | ||
627 | crypto_digest_update(tfm, sg, 1); | ||
628 | } | ||
629 | crypto_digest_final(tfm, out); | ||
630 | |||
631 | end = get_cycles(); | ||
632 | |||
633 | cycles += end - start; | ||
634 | } | ||
635 | |||
636 | local_irq_enable(); | ||
637 | local_bh_enable(); | ||
638 | |||
639 | printk("%6lu cycles/operation, %4lu cycles/byte\n", | ||
640 | cycles / 8, cycles / (8 * blen)); | ||
641 | |||
642 | return; | ||
643 | } | ||
644 | |||
645 | static void test_digest_speed(char *algo, unsigned int sec, | ||
646 | struct digest_speed *speed) | ||
647 | { | ||
648 | struct crypto_tfm *tfm; | ||
649 | char output[1024]; | ||
650 | int i; | ||
651 | |||
652 | printk("\ntesting speed of %s\n", algo); | ||
653 | |||
654 | tfm = crypto_alloc_tfm(algo, 0); | ||
655 | |||
656 | if (tfm == NULL) { | ||
657 | printk("failed to load transform for %s\n", algo); | ||
658 | return; | ||
659 | } | ||
660 | |||
661 | if (crypto_tfm_alg_digestsize(tfm) > sizeof(output)) { | ||
662 | printk("digestsize(%u) > outputbuffer(%zu)\n", | ||
663 | crypto_tfm_alg_digestsize(tfm), sizeof(output)); | ||
664 | goto out; | ||
665 | } | ||
666 | |||
667 | for (i = 0; speed[i].blen != 0; i++) { | ||
668 | if (speed[i].blen > TVMEMSIZE) { | ||
669 | printk("template (%u) too big for tvmem (%u)\n", | ||
670 | speed[i].blen, TVMEMSIZE); | ||
671 | goto out; | ||
672 | } | ||
673 | |||
674 | printk("test%3u (%5u byte blocks,%5u bytes per update,%4u updates): ", | ||
675 | i, speed[i].blen, speed[i].plen, speed[i].blen / speed[i].plen); | ||
676 | |||
677 | memset(tvmem, 0xff, speed[i].blen); | ||
678 | |||
679 | if (sec) | ||
680 | test_digest_jiffies(tfm, tvmem, speed[i].blen, speed[i].plen, output, sec); | ||
681 | else | ||
682 | test_digest_cycles(tfm, tvmem, speed[i].blen, speed[i].plen, output); | ||
683 | } | ||
684 | |||
685 | out: | ||
686 | crypto_free_tfm(tfm); | ||
687 | } | ||
688 | |||
573 | static void test_deflate(void) | 689 | static void test_deflate(void) |
574 | { | 690 | { |
575 | unsigned int i; | 691 | unsigned int i; |
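Both helpers hash a blen-byte buffer in plen-byte slices, so each operation is one crypto_digest_init(), blen / plen calls to crypto_digest_update() and one crypto_digest_final(); the { .blen = 4096, .plen = 256 } entry in the template below, for example, costs sixteen updates per operation. test_digest_jiffies() then reports bcount / sec operations per second and bcount * blen / sec bytes per second over a sec-second window, while test_digest_cycles() discards four warm-up rounds and averages the cycle counter over eight timed rounds.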
@@ -1086,6 +1202,60 @@ static void do_test(void) | |||
1086 | des_speed_template); | 1202 | des_speed_template); |
1087 | break; | 1203 | break; |
1088 | 1204 | ||
1205 | case 300: | ||
1206 | /* fall through */ | ||
1207 | |||
1208 | case 301: | ||
1209 | test_digest_speed("md4", sec, generic_digest_speed_template); | ||
1210 | if (mode > 300 && mode < 400) break; | ||
1211 | |||
1212 | case 302: | ||
1213 | test_digest_speed("md5", sec, generic_digest_speed_template); | ||
1214 | if (mode > 300 && mode < 400) break; | ||
1215 | |||
1216 | case 303: | ||
1217 | test_digest_speed("sha1", sec, generic_digest_speed_template); | ||
1218 | if (mode > 300 && mode < 400) break; | ||
1219 | |||
1220 | case 304: | ||
1221 | test_digest_speed("sha256", sec, generic_digest_speed_template); | ||
1222 | if (mode > 300 && mode < 400) break; | ||
1223 | |||
1224 | case 305: | ||
1225 | test_digest_speed("sha384", sec, generic_digest_speed_template); | ||
1226 | if (mode > 300 && mode < 400) break; | ||
1227 | |||
1228 | case 306: | ||
1229 | test_digest_speed("sha512", sec, generic_digest_speed_template); | ||
1230 | if (mode > 300 && mode < 400) break; | ||
1231 | |||
1232 | case 307: | ||
1233 | test_digest_speed("wp256", sec, generic_digest_speed_template); | ||
1234 | if (mode > 300 && mode < 400) break; | ||
1235 | |||
1236 | case 308: | ||
1237 | test_digest_speed("wp384", sec, generic_digest_speed_template); | ||
1238 | if (mode > 300 && mode < 400) break; | ||
1239 | |||
1240 | case 309: | ||
1241 | test_digest_speed("wp512", sec, generic_digest_speed_template); | ||
1242 | if (mode > 300 && mode < 400) break; | ||
1243 | |||
1244 | case 310: | ||
1245 | test_digest_speed("tgr128", sec, generic_digest_speed_template); | ||
1246 | if (mode > 300 && mode < 400) break; | ||
1247 | |||
1248 | case 311: | ||
1249 | test_digest_speed("tgr160", sec, generic_digest_speed_template); | ||
1250 | if (mode > 300 && mode < 400) break; | ||
1251 | |||
1252 | case 312: | ||
1253 | test_digest_speed("tgr192", sec, generic_digest_speed_template); | ||
1254 | if (mode > 300 && mode < 400) break; | ||
1255 | |||
1256 | case 399: | ||
1257 | break; | ||
1258 | |||
1089 | case 1000: | 1259 | case 1000: |
1090 | test_available(); | 1260 | test_available(); |
1091 | break; | 1261 | break; |
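Mode 300 is a plain fall-through, so it runs every digest benchmark in sequence; modes 301 through 312 pick a single algorithm because the "if (mode > 300 && mode < 400) break;" guard stops the cascade once one specific test has run, and case 399 exists only to terminate the chain. Assuming the module keeps its existing mode and sec parameters, something like "modprobe tcrypt mode=303 sec=1" would benchmark sha1 with one-second sampling, while mode=300 walks the whole digest list (sec=0 selects the cycle-counter variant instead).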
@@ -1113,7 +1283,14 @@ static int __init init(void) | |||
1113 | 1283 | ||
1114 | kfree(xbuf); | 1284 | kfree(xbuf); |
1115 | kfree(tvmem); | 1285 | kfree(tvmem); |
1116 | return 0; | 1286 | |
1287 | /* We intentionally return -EAGAIN to prevent keeping | ||
1288 | * the module. It does all its work from init() | ||
1289 | * and doesn't offer any runtime functionality | ||
1290 | * => we don't need it in memory, do we? | ||
1291 | * -- mludvig | ||
1292 | */ | ||
1293 | return -EAGAIN; | ||
1117 | } | 1294 | } |
1118 | 1295 | ||
1119 | /* | 1296 | /* |
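Returning -EAGAIN from init() means the module loader frees tcrypt as soon as the self-tests and benchmarks have finished: insmod/modprobe reports an error, but the results are already in the kernel log and nothing stays resident. A hedged skeleton of the same run-once pattern (an illustrative module, not the tcrypt source):

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/init.h>

static int __init selftest_init(void)
{
	printk(KERN_INFO "selftest: running tests\n");
	/* ... all the real work happens here ... */

	/* Deliberately fail the load so the module is dropped again;
	 * the loader frees it and the caller sees -EAGAIN. */
	return -EAGAIN;
}

static void __exit selftest_exit(void)
{
}

module_init(selftest_init);
module_exit(selftest_exit);
MODULE_LICENSE("GPL");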
diff --git a/crypto/tcrypt.h b/crypto/tcrypt.h index 1f683ba794ee..1fac5602f633 100644 --- a/crypto/tcrypt.h +++ b/crypto/tcrypt.h | |||
@@ -65,6 +65,11 @@ struct cipher_speed { | |||
65 | unsigned int blen; | 65 | unsigned int blen; |
66 | }; | 66 | }; |
67 | 67 | ||
68 | struct digest_speed { | ||
69 | unsigned int blen; /* buffer length */ | ||
70 | unsigned int plen; /* per-update length */ | ||
71 | }; | ||
72 | |||
68 | /* | 73 | /* |
69 | * MD4 test vectors from RFC1320 | 74 | * MD4 test vectors from RFC1320 |
70 | */ | 75 | */ |
@@ -2975,4 +2980,35 @@ static struct cipher_speed des_speed_template[] = { | |||
2975 | { .klen = 0, .blen = 0, } | 2980 | { .klen = 0, .blen = 0, } |
2976 | }; | 2981 | }; |
2977 | 2982 | ||
2983 | /* | ||
2984 | * Digest speed tests | ||
2985 | */ | ||
2986 | static struct digest_speed generic_digest_speed_template[] = { | ||
2987 | { .blen = 16, .plen = 16, }, | ||
2988 | { .blen = 64, .plen = 16, }, | ||
2989 | { .blen = 64, .plen = 64, }, | ||
2990 | { .blen = 256, .plen = 16, }, | ||
2991 | { .blen = 256, .plen = 64, }, | ||
2992 | { .blen = 256, .plen = 256, }, | ||
2993 | { .blen = 1024, .plen = 16, }, | ||
2994 | { .blen = 1024, .plen = 256, }, | ||
2995 | { .blen = 1024, .plen = 1024, }, | ||
2996 | { .blen = 2048, .plen = 16, }, | ||
2997 | { .blen = 2048, .plen = 256, }, | ||
2998 | { .blen = 2048, .plen = 1024, }, | ||
2999 | { .blen = 2048, .plen = 2048, }, | ||
3000 | { .blen = 4096, .plen = 16, }, | ||
3001 | { .blen = 4096, .plen = 256, }, | ||
3002 | { .blen = 4096, .plen = 1024, }, | ||
3003 | { .blen = 4096, .plen = 4096, }, | ||
3004 | { .blen = 8192, .plen = 16, }, | ||
3005 | { .blen = 8192, .plen = 256, }, | ||
3006 | { .blen = 8192, .plen = 1024, }, | ||
3007 | { .blen = 8192, .plen = 4096, }, | ||
3008 | { .blen = 8192, .plen = 8192, }, | ||
3009 | |||
3010 | /* End marker */ | ||
3011 | { .blen = 0, .plen = 0, } | ||
3012 | }; | ||
3013 | |||
2978 | #endif /* _CRYPTO_TCRYPT_H */ | 3014 | #endif /* _CRYPTO_TCRYPT_H */ |
diff --git a/crypto/tea.c b/crypto/tea.c index a6a02b30e470..5367adc82fc9 100644 --- a/crypto/tea.c +++ b/crypto/tea.c | |||
@@ -45,10 +45,10 @@ struct xtea_ctx { | |||
45 | u32 KEY[4]; | 45 | u32 KEY[4]; |
46 | }; | 46 | }; |
47 | 47 | ||
48 | static int tea_setkey(void *ctx_arg, const u8 *in_key, | 48 | static int tea_setkey(struct crypto_tfm *tfm, const u8 *in_key, |
49 | unsigned int key_len, u32 *flags) | 49 | unsigned int key_len, u32 *flags) |
50 | { | 50 | { |
51 | struct tea_ctx *ctx = ctx_arg; | 51 | struct tea_ctx *ctx = crypto_tfm_ctx(tfm); |
52 | const __le32 *key = (const __le32 *)in_key; | 52 | const __le32 *key = (const __le32 *)in_key; |
53 | 53 | ||
54 | if (key_len != 16) | 54 | if (key_len != 16) |
@@ -66,12 +66,11 @@ static int tea_setkey(void *ctx_arg, const u8 *in_key, | |||
66 | 66 | ||
67 | } | 67 | } |
68 | 68 | ||
69 | static void tea_encrypt(void *ctx_arg, u8 *dst, const u8 *src) | 69 | static void tea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) |
70 | { | 70 | { |
71 | u32 y, z, n, sum = 0; | 71 | u32 y, z, n, sum = 0; |
72 | u32 k0, k1, k2, k3; | 72 | u32 k0, k1, k2, k3; |
73 | 73 | struct tea_ctx *ctx = crypto_tfm_ctx(tfm); | |
74 | struct tea_ctx *ctx = ctx_arg; | ||
75 | const __le32 *in = (const __le32 *)src; | 74 | const __le32 *in = (const __le32 *)src; |
76 | __le32 *out = (__le32 *)dst; | 75 | __le32 *out = (__le32 *)dst; |
77 | 76 | ||
@@ -95,11 +94,11 @@ static void tea_encrypt(void *ctx_arg, u8 *dst, const u8 *src) | |||
95 | out[1] = cpu_to_le32(z); | 94 | out[1] = cpu_to_le32(z); |
96 | } | 95 | } |
97 | 96 | ||
98 | static void tea_decrypt(void *ctx_arg, u8 *dst, const u8 *src) | 97 | static void tea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) |
99 | { | 98 | { |
100 | u32 y, z, n, sum; | 99 | u32 y, z, n, sum; |
101 | u32 k0, k1, k2, k3; | 100 | u32 k0, k1, k2, k3; |
102 | struct tea_ctx *ctx = ctx_arg; | 101 | struct tea_ctx *ctx = crypto_tfm_ctx(tfm); |
103 | const __le32 *in = (const __le32 *)src; | 102 | const __le32 *in = (const __le32 *)src; |
104 | __le32 *out = (__le32 *)dst; | 103 | __le32 *out = (__le32 *)dst; |
105 | 104 | ||
@@ -125,10 +124,10 @@ static void tea_decrypt(void *ctx_arg, u8 *dst, const u8 *src) | |||
125 | out[1] = cpu_to_le32(z); | 124 | out[1] = cpu_to_le32(z); |
126 | } | 125 | } |
127 | 126 | ||
128 | static int xtea_setkey(void *ctx_arg, const u8 *in_key, | 127 | static int xtea_setkey(struct crypto_tfm *tfm, const u8 *in_key, |
129 | unsigned int key_len, u32 *flags) | 128 | unsigned int key_len, u32 *flags) |
130 | { | 129 | { |
131 | struct xtea_ctx *ctx = ctx_arg; | 130 | struct xtea_ctx *ctx = crypto_tfm_ctx(tfm); |
132 | const __le32 *key = (const __le32 *)in_key; | 131 | const __le32 *key = (const __le32 *)in_key; |
133 | 132 | ||
134 | if (key_len != 16) | 133 | if (key_len != 16) |
@@ -146,12 +145,11 @@ static int xtea_setkey(void *ctx_arg, const u8 *in_key, | |||
146 | 145 | ||
147 | } | 146 | } |
148 | 147 | ||
149 | static void xtea_encrypt(void *ctx_arg, u8 *dst, const u8 *src) | 148 | static void xtea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) |
150 | { | 149 | { |
151 | u32 y, z, sum = 0; | 150 | u32 y, z, sum = 0; |
152 | u32 limit = XTEA_DELTA * XTEA_ROUNDS; | 151 | u32 limit = XTEA_DELTA * XTEA_ROUNDS; |
153 | 152 | struct xtea_ctx *ctx = crypto_tfm_ctx(tfm); | |
154 | struct xtea_ctx *ctx = ctx_arg; | ||
155 | const __le32 *in = (const __le32 *)src; | 153 | const __le32 *in = (const __le32 *)src; |
156 | __le32 *out = (__le32 *)dst; | 154 | __le32 *out = (__le32 *)dst; |
157 | 155 | ||
@@ -168,10 +166,10 @@ static void xtea_encrypt(void *ctx_arg, u8 *dst, const u8 *src) | |||
168 | out[1] = cpu_to_le32(z); | 166 | out[1] = cpu_to_le32(z); |
169 | } | 167 | } |
170 | 168 | ||
171 | static void xtea_decrypt(void *ctx_arg, u8 *dst, const u8 *src) | 169 | static void xtea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) |
172 | { | 170 | { |
173 | u32 y, z, sum; | 171 | u32 y, z, sum; |
174 | struct tea_ctx *ctx = ctx_arg; | 172 | struct tea_ctx *ctx = crypto_tfm_ctx(tfm); |
175 | const __le32 *in = (const __le32 *)src; | 173 | const __le32 *in = (const __le32 *)src; |
176 | __le32 *out = (__le32 *)dst; | 174 | __le32 *out = (__le32 *)dst; |
177 | 175 | ||
@@ -191,12 +189,11 @@ static void xtea_decrypt(void *ctx_arg, u8 *dst, const u8 *src) | |||
191 | } | 189 | } |
192 | 190 | ||
193 | 191 | ||
194 | static void xeta_encrypt(void *ctx_arg, u8 *dst, const u8 *src) | 192 | static void xeta_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) |
195 | { | 193 | { |
196 | u32 y, z, sum = 0; | 194 | u32 y, z, sum = 0; |
197 | u32 limit = XTEA_DELTA * XTEA_ROUNDS; | 195 | u32 limit = XTEA_DELTA * XTEA_ROUNDS; |
198 | 196 | struct xtea_ctx *ctx = crypto_tfm_ctx(tfm); | |
199 | struct xtea_ctx *ctx = ctx_arg; | ||
200 | const __le32 *in = (const __le32 *)src; | 197 | const __le32 *in = (const __le32 *)src; |
201 | __le32 *out = (__le32 *)dst; | 198 | __le32 *out = (__le32 *)dst; |
202 | 199 | ||
@@ -213,10 +210,10 @@ static void xeta_encrypt(void *ctx_arg, u8 *dst, const u8 *src) | |||
213 | out[1] = cpu_to_le32(z); | 210 | out[1] = cpu_to_le32(z); |
214 | } | 211 | } |
215 | 212 | ||
216 | static void xeta_decrypt(void *ctx_arg, u8 *dst, const u8 *src) | 213 | static void xeta_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) |
217 | { | 214 | { |
218 | u32 y, z, sum; | 215 | u32 y, z, sum; |
219 | struct tea_ctx *ctx = ctx_arg; | 216 | struct tea_ctx *ctx = crypto_tfm_ctx(tfm); |
220 | const __le32 *in = (const __le32 *)src; | 217 | const __le32 *in = (const __le32 *)src; |
221 | __le32 *out = (__le32 *)dst; | 218 | __le32 *out = (__le32 *)dst; |
222 | 219 | ||
diff --git a/crypto/tgr192.c b/crypto/tgr192.c index 2d8e44f6fbe9..a0fadf3dd3e2 100644 --- a/crypto/tgr192.c +++ b/crypto/tgr192.c | |||
@@ -496,11 +496,10 @@ static void tgr192_transform(struct tgr192_ctx *tctx, const u8 * data) | |||
496 | tctx->c = c; | 496 | tctx->c = c; |
497 | } | 497 | } |
498 | 498 | ||
499 | static void tgr192_init(void *ctx) | 499 | static void tgr192_init(struct crypto_tfm *tfm) |
500 | { | 500 | { |
501 | struct tgr192_ctx *tctx = ctx; | 501 | struct tgr192_ctx *tctx = crypto_tfm_ctx(tfm); |
502 | 502 | ||
503 | memset (tctx->hash, 0, 64); | ||
504 | tctx->a = 0x0123456789abcdefULL; | 503 | tctx->a = 0x0123456789abcdefULL; |
505 | tctx->b = 0xfedcba9876543210ULL; | 504 | tctx->b = 0xfedcba9876543210ULL; |
506 | tctx->c = 0xf096a5b4c3b2e187ULL; | 505 | tctx->c = 0xf096a5b4c3b2e187ULL; |
@@ -511,9 +510,10 @@ static void tgr192_init(void *ctx) | |||
511 | 510 | ||
512 | /* Update the message digest with the contents | 511 | /* Update the message digest with the contents |
513 | * of INBUF with length INLEN. */ | 512 | * of INBUF with length INLEN. */ |
514 | static void tgr192_update(void *ctx, const u8 * inbuf, unsigned int len) | 513 | static void tgr192_update(struct crypto_tfm *tfm, const u8 *inbuf, |
514 | unsigned int len) | ||
515 | { | 515 | { |
516 | struct tgr192_ctx *tctx = ctx; | 516 | struct tgr192_ctx *tctx = crypto_tfm_ctx(tfm); |
517 | 517 | ||
518 | if (tctx->count == 64) { /* flush the buffer */ | 518 | if (tctx->count == 64) { /* flush the buffer */ |
519 | tgr192_transform(tctx, tctx->hash); | 519 | tgr192_transform(tctx, tctx->hash); |
@@ -527,7 +527,7 @@ static void tgr192_update(void *ctx, const u8 * inbuf, unsigned int len) | |||
527 | for (; len && tctx->count < 64; len--) { | 527 | for (; len && tctx->count < 64; len--) { |
528 | tctx->hash[tctx->count++] = *inbuf++; | 528 | tctx->hash[tctx->count++] = *inbuf++; |
529 | } | 529 | } |
530 | tgr192_update(tctx, NULL, 0); | 530 | tgr192_update(tfm, NULL, 0); |
531 | if (!len) { | 531 | if (!len) { |
532 | return; | 532 | return; |
533 | } | 533 | } |
@@ -549,15 +549,15 @@ static void tgr192_update(void *ctx, const u8 * inbuf, unsigned int len) | |||
549 | 549 | ||
550 | 550 | ||
551 | /* The routine terminates the computation */ | 551 | /* The routine terminates the computation */ |
552 | static void tgr192_final(void *ctx, u8 * out) | 552 | static void tgr192_final(struct crypto_tfm *tfm, u8 * out) |
553 | { | 553 | { |
554 | struct tgr192_ctx *tctx = ctx; | 554 | struct tgr192_ctx *tctx = crypto_tfm_ctx(tfm); |
555 | __be64 *dst = (__be64 *)out; | 555 | __be64 *dst = (__be64 *)out; |
556 | __be64 *be64p; | 556 | __be64 *be64p; |
557 | __le32 *le32p; | 557 | __le32 *le32p; |
558 | u32 t, msb, lsb; | 558 | u32 t, msb, lsb; |
559 | 559 | ||
560 | tgr192_update(tctx, NULL, 0); /* flush */ ; | 560 | tgr192_update(tfm, NULL, 0); /* flush */ ; |
561 | 561 | ||
562 | msb = 0; | 562 | msb = 0; |
563 | t = tctx->nblocks; | 563 | t = tctx->nblocks; |
@@ -585,7 +585,7 @@ static void tgr192_final(void *ctx, u8 * out) | |||
585 | while (tctx->count < 64) { | 585 | while (tctx->count < 64) { |
586 | tctx->hash[tctx->count++] = 0; | 586 | tctx->hash[tctx->count++] = 0; |
587 | } | 587 | } |
588 | tgr192_update(tctx, NULL, 0); /* flush */ ; | 588 | tgr192_update(tfm, NULL, 0); /* flush */ ; |
589 | memset(tctx->hash, 0, 56); /* fill next block with zeroes */ | 589 | memset(tctx->hash, 0, 56); /* fill next block with zeroes */ |
590 | } | 590 | } |
591 | /* append the 64 bit count */ | 591 | /* append the 64 bit count */ |
@@ -601,22 +601,20 @@ static void tgr192_final(void *ctx, u8 * out) | |||
601 | dst[2] = be64p[2] = cpu_to_be64(tctx->c); | 601 | dst[2] = be64p[2] = cpu_to_be64(tctx->c); |
602 | } | 602 | } |
603 | 603 | ||
604 | static void tgr160_final(void *ctx, u8 * out) | 604 | static void tgr160_final(struct crypto_tfm *tfm, u8 * out) |
605 | { | 605 | { |
606 | struct tgr192_ctx *wctx = ctx; | ||
607 | u8 D[64]; | 606 | u8 D[64]; |
608 | 607 | ||
609 | tgr192_final(wctx, D); | 608 | tgr192_final(tfm, D); |
610 | memcpy(out, D, TGR160_DIGEST_SIZE); | 609 | memcpy(out, D, TGR160_DIGEST_SIZE); |
611 | memset(D, 0, TGR192_DIGEST_SIZE); | 610 | memset(D, 0, TGR192_DIGEST_SIZE); |
612 | } | 611 | } |
613 | 612 | ||
614 | static void tgr128_final(void *ctx, u8 * out) | 613 | static void tgr128_final(struct crypto_tfm *tfm, u8 * out) |
615 | { | 614 | { |
616 | struct tgr192_ctx *wctx = ctx; | ||
617 | u8 D[64]; | 615 | u8 D[64]; |
618 | 616 | ||
619 | tgr192_final(wctx, D); | 617 | tgr192_final(tfm, D); |
620 | memcpy(out, D, TGR128_DIGEST_SIZE); | 618 | memcpy(out, D, TGR128_DIGEST_SIZE); |
621 | memset(D, 0, TGR192_DIGEST_SIZE); | 619 | memset(D, 0, TGR192_DIGEST_SIZE); |
622 | } | 620 | } |
@@ -627,6 +625,7 @@ static struct crypto_alg tgr192 = { | |||
627 | .cra_blocksize = TGR192_BLOCK_SIZE, | 625 | .cra_blocksize = TGR192_BLOCK_SIZE, |
628 | .cra_ctxsize = sizeof(struct tgr192_ctx), | 626 | .cra_ctxsize = sizeof(struct tgr192_ctx), |
629 | .cra_module = THIS_MODULE, | 627 | .cra_module = THIS_MODULE, |
628 | .cra_alignmask = 7, | ||
630 | .cra_list = LIST_HEAD_INIT(tgr192.cra_list), | 629 | .cra_list = LIST_HEAD_INIT(tgr192.cra_list), |
631 | .cra_u = {.digest = { | 630 | .cra_u = {.digest = { |
632 | .dia_digestsize = TGR192_DIGEST_SIZE, | 631 | .dia_digestsize = TGR192_DIGEST_SIZE, |
@@ -641,6 +640,7 @@ static struct crypto_alg tgr160 = { | |||
641 | .cra_blocksize = TGR192_BLOCK_SIZE, | 640 | .cra_blocksize = TGR192_BLOCK_SIZE, |
642 | .cra_ctxsize = sizeof(struct tgr192_ctx), | 641 | .cra_ctxsize = sizeof(struct tgr192_ctx), |
643 | .cra_module = THIS_MODULE, | 642 | .cra_module = THIS_MODULE, |
643 | .cra_alignmask = 7, | ||
644 | .cra_list = LIST_HEAD_INIT(tgr160.cra_list), | 644 | .cra_list = LIST_HEAD_INIT(tgr160.cra_list), |
645 | .cra_u = {.digest = { | 645 | .cra_u = {.digest = { |
646 | .dia_digestsize = TGR160_DIGEST_SIZE, | 646 | .dia_digestsize = TGR160_DIGEST_SIZE, |
@@ -655,6 +655,7 @@ static struct crypto_alg tgr128 = { | |||
655 | .cra_blocksize = TGR192_BLOCK_SIZE, | 655 | .cra_blocksize = TGR192_BLOCK_SIZE, |
656 | .cra_ctxsize = sizeof(struct tgr192_ctx), | 656 | .cra_ctxsize = sizeof(struct tgr192_ctx), |
657 | .cra_module = THIS_MODULE, | 657 | .cra_module = THIS_MODULE, |
658 | .cra_alignmask = 7, | ||
658 | .cra_list = LIST_HEAD_INIT(tgr128.cra_list), | 659 | .cra_list = LIST_HEAD_INIT(tgr128.cra_list), |
659 | .cra_u = {.digest = { | 660 | .cra_u = {.digest = { |
660 | .dia_digestsize = TGR128_DIGEST_SIZE, | 661 | .dia_digestsize = TGR128_DIGEST_SIZE, |
diff --git a/crypto/twofish.c b/crypto/twofish.c index ddfd5a3fcc5f..ec2488242e2d 100644 --- a/crypto/twofish.c +++ b/crypto/twofish.c | |||
@@ -643,11 +643,11 @@ struct twofish_ctx { | |||
643 | }; | 643 | }; |
644 | 644 | ||
645 | /* Perform the key setup. */ | 645 | /* Perform the key setup. */ |
646 | static int twofish_setkey(void *cx, const u8 *key, | 646 | static int twofish_setkey(struct crypto_tfm *tfm, const u8 *key, |
647 | unsigned int key_len, u32 *flags) | 647 | unsigned int key_len, u32 *flags) |
648 | { | 648 | { |
649 | 649 | ||
650 | struct twofish_ctx *ctx = cx; | 650 | struct twofish_ctx *ctx = crypto_tfm_ctx(tfm); |
651 | 651 | ||
652 | int i, j, k; | 652 | int i, j, k; |
653 | 653 | ||
@@ -802,9 +802,9 @@ static int twofish_setkey(void *cx, const u8 *key, | |||
802 | } | 802 | } |
803 | 803 | ||
804 | /* Encrypt one block. in and out may be the same. */ | 804 | /* Encrypt one block. in and out may be the same. */ |
805 | static void twofish_encrypt(void *cx, u8 *out, const u8 *in) | 805 | static void twofish_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) |
806 | { | 806 | { |
807 | struct twofish_ctx *ctx = cx; | 807 | struct twofish_ctx *ctx = crypto_tfm_ctx(tfm); |
808 | const __le32 *src = (const __le32 *)in; | 808 | const __le32 *src = (const __le32 *)in; |
809 | __le32 *dst = (__le32 *)out; | 809 | __le32 *dst = (__le32 *)out; |
810 | 810 | ||
@@ -839,9 +839,9 @@ static void twofish_encrypt(void *cx, u8 *out, const u8 *in) | |||
839 | } | 839 | } |
840 | 840 | ||
841 | /* Decrypt one block. in and out may be the same. */ | 841 | /* Decrypt one block. in and out may be the same. */ |
842 | static void twofish_decrypt(void *cx, u8 *out, const u8 *in) | 842 | static void twofish_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) |
843 | { | 843 | { |
844 | struct twofish_ctx *ctx = cx; | 844 | struct twofish_ctx *ctx = crypto_tfm_ctx(tfm); |
845 | const __le32 *src = (const __le32 *)in; | 845 | const __le32 *src = (const __le32 *)in; |
846 | __le32 *dst = (__le32 *)out; | 846 | __le32 *dst = (__le32 *)out; |
847 | 847 | ||
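The twofish hunks show the conversion pattern applied throughout this series: cipher callbacks now receive the `struct crypto_tfm *` itself and fetch their private state with `crypto_tfm_ctx()` instead of being handed a bare context pointer. A hedged skeleton of the post-conversion callback shape, with the algorithm name and context struct invented for illustration:

/* Sketch of the tfm-based cipher callbacks; "example" is a made-up name. */
#include <linux/crypto.h>

struct example_ctx {
	u32 round_keys[40];
};

static int example_setkey(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int key_len, u32 *flags)
{
	struct example_ctx *ctx = crypto_tfm_ctx(tfm);	/* was: void *cx */

	/* ... expand `key` into ctx->round_keys ... */
	return 0;
}

static void example_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct example_ctx *ctx = crypto_tfm_ctx(tfm);

	/* ... encrypt one block of `in` into `out` using ctx->round_keys ... */
}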
diff --git a/crypto/wp512.c b/crypto/wp512.c index b226a126cfae..727d05a19ff4 100644 --- a/crypto/wp512.c +++ b/crypto/wp512.c | |||
@@ -981,9 +981,9 @@ static void wp512_process_buffer(struct wp512_ctx *wctx) { | |||
981 | 981 | ||
982 | } | 982 | } |
983 | 983 | ||
984 | static void wp512_init (void *ctx) { | 984 | static void wp512_init(struct crypto_tfm *tfm) { |
985 | struct wp512_ctx *wctx = crypto_tfm_ctx(tfm); | ||
985 | int i; | 986 | int i; |
986 | struct wp512_ctx *wctx = ctx; | ||
987 | 987 | ||
988 | memset(wctx->bitLength, 0, 32); | 988 | memset(wctx->bitLength, 0, 32); |
989 | wctx->bufferBits = wctx->bufferPos = 0; | 989 | wctx->bufferBits = wctx->bufferPos = 0; |
@@ -993,10 +993,10 @@ static void wp512_init (void *ctx) { | |||
993 | } | 993 | } |
994 | } | 994 | } |
995 | 995 | ||
996 | static void wp512_update(void *ctx, const u8 *source, unsigned int len) | 996 | static void wp512_update(struct crypto_tfm *tfm, const u8 *source, |
997 | unsigned int len) | ||
997 | { | 998 | { |
998 | 999 | struct wp512_ctx *wctx = crypto_tfm_ctx(tfm); | |
999 | struct wp512_ctx *wctx = ctx; | ||
1000 | int sourcePos = 0; | 1000 | int sourcePos = 0; |
1001 | unsigned int bits_len = len * 8; // convert to number of bits | 1001 | unsigned int bits_len = len * 8; // convert to number of bits |
1002 | int sourceGap = (8 - ((int)bits_len & 7)) & 7; | 1002 | int sourceGap = (8 - ((int)bits_len & 7)) & 7; |
@@ -1054,9 +1054,9 @@ static void wp512_update(void *ctx, const u8 *source, unsigned int len) | |||
1054 | 1054 | ||
1055 | } | 1055 | } |
1056 | 1056 | ||
1057 | static void wp512_final(void *ctx, u8 *out) | 1057 | static void wp512_final(struct crypto_tfm *tfm, u8 *out) |
1058 | { | 1058 | { |
1059 | struct wp512_ctx *wctx = ctx; | 1059 | struct wp512_ctx *wctx = crypto_tfm_ctx(tfm); |
1060 | int i; | 1060 | int i; |
1061 | u8 *buffer = wctx->buffer; | 1061 | u8 *buffer = wctx->buffer; |
1062 | u8 *bitLength = wctx->bitLength; | 1062 | u8 *bitLength = wctx->bitLength; |
@@ -1087,22 +1087,20 @@ static void wp512_final(void *ctx, u8 *out) | |||
1087 | wctx->bufferPos = bufferPos; | 1087 | wctx->bufferPos = bufferPos; |
1088 | } | 1088 | } |
1089 | 1089 | ||
1090 | static void wp384_final(void *ctx, u8 *out) | 1090 | static void wp384_final(struct crypto_tfm *tfm, u8 *out) |
1091 | { | 1091 | { |
1092 | struct wp512_ctx *wctx = ctx; | ||
1093 | u8 D[64]; | 1092 | u8 D[64]; |
1094 | 1093 | ||
1095 | wp512_final (wctx, D); | 1094 | wp512_final(tfm, D); |
1096 | memcpy (out, D, WP384_DIGEST_SIZE); | 1095 | memcpy (out, D, WP384_DIGEST_SIZE); |
1097 | memset (D, 0, WP512_DIGEST_SIZE); | 1096 | memset (D, 0, WP512_DIGEST_SIZE); |
1098 | } | 1097 | } |
1099 | 1098 | ||
1100 | static void wp256_final(void *ctx, u8 *out) | 1099 | static void wp256_final(struct crypto_tfm *tfm, u8 *out) |
1101 | { | 1100 | { |
1102 | struct wp512_ctx *wctx = ctx; | ||
1103 | u8 D[64]; | 1101 | u8 D[64]; |
1104 | 1102 | ||
1105 | wp512_final (wctx, D); | 1103 | wp512_final(tfm, D); |
1106 | memcpy (out, D, WP256_DIGEST_SIZE); | 1104 | memcpy (out, D, WP256_DIGEST_SIZE); |
1107 | memset (D, 0, WP512_DIGEST_SIZE); | 1105 | memset (D, 0, WP512_DIGEST_SIZE); |
1108 | } | 1106 | } |
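With every `dia_final` now taking the tfm, the truncated variants (wp384/wp256 here, tgr160/tgr128 above) become plain pass-through wrappers: forward the same tfm to the full-width final, keep only the leading bytes, and scrub the scratch copy. A self-contained sketch of that truncation pattern, with illustrative names and sizes:

/* Illustrative truncation wrapper; full_final() stands in for
 * wp512_final()/tgr192_final() and is assumed to emit the full digest. */
#include <string.h>

#define FULL_DIGEST_SIZE	64
#define SHORT_DIGEST_SIZE	48

void full_final(void *tfm, unsigned char *out);

void short_final(void *tfm, unsigned char *out)
{
	unsigned char D[FULL_DIGEST_SIZE];

	full_final(tfm, D);			/* same tfm, no context cast */
	memcpy(out, D, SHORT_DIGEST_SIZE);	/* keep only the prefix */
	memset(D, 0, FULL_DIGEST_SIZE);		/* wipe the temporary copy */
}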
diff --git a/drivers/crypto/padlock-aes.c b/drivers/crypto/padlock-aes.c index 5158a9db4bc5..17ee684144f9 100644 --- a/drivers/crypto/padlock-aes.c +++ b/drivers/crypto/padlock-aes.c | |||
@@ -60,15 +60,14 @@ | |||
60 | #define AES_EXTENDED_KEY_SIZE_B (AES_EXTENDED_KEY_SIZE * sizeof(uint32_t)) | 60 | #define AES_EXTENDED_KEY_SIZE_B (AES_EXTENDED_KEY_SIZE * sizeof(uint32_t)) |
61 | 61 | ||
62 | struct aes_ctx { | 62 | struct aes_ctx { |
63 | uint32_t e_data[AES_EXTENDED_KEY_SIZE]; | ||
64 | uint32_t d_data[AES_EXTENDED_KEY_SIZE]; | ||
65 | struct { | 63 | struct { |
66 | struct cword encrypt; | 64 | struct cword encrypt; |
67 | struct cword decrypt; | 65 | struct cword decrypt; |
68 | } cword; | 66 | } cword; |
69 | uint32_t *E; | 67 | u32 *D; |
70 | uint32_t *D; | ||
71 | int key_length; | 68 | int key_length; |
69 | u32 E[AES_EXTENDED_KEY_SIZE]; | ||
70 | u32 d_data[AES_EXTENDED_KEY_SIZE]; | ||
72 | }; | 71 | }; |
73 | 72 | ||
74 | /* ====== Key management routines ====== */ | 73 | /* ====== Key management routines ====== */ |
@@ -282,19 +281,20 @@ aes_hw_extkey_available(uint8_t key_len) | |||
282 | return 0; | 281 | return 0; |
283 | } | 282 | } |
284 | 283 | ||
285 | static inline struct aes_ctx *aes_ctx(void *ctx) | 284 | static inline struct aes_ctx *aes_ctx(struct crypto_tfm *tfm) |
286 | { | 285 | { |
286 | unsigned long addr = (unsigned long)crypto_tfm_ctx(tfm); | ||
287 | unsigned long align = PADLOCK_ALIGNMENT; | 287 | unsigned long align = PADLOCK_ALIGNMENT; |
288 | 288 | ||
289 | if (align <= crypto_tfm_ctx_alignment()) | 289 | if (align <= crypto_tfm_ctx_alignment()) |
290 | align = 1; | 290 | align = 1; |
291 | return (struct aes_ctx *)ALIGN((unsigned long)ctx, align); | 291 | return (struct aes_ctx *)ALIGN(addr, align); |
292 | } | 292 | } |
293 | 293 | ||
294 | static int | 294 | static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, |
295 | aes_set_key(void *ctx_arg, const uint8_t *in_key, unsigned int key_len, uint32_t *flags) | 295 | unsigned int key_len, u32 *flags) |
296 | { | 296 | { |
297 | struct aes_ctx *ctx = aes_ctx(ctx_arg); | 297 | struct aes_ctx *ctx = aes_ctx(tfm); |
298 | const __le32 *key = (const __le32 *)in_key; | 298 | const __le32 *key = (const __le32 *)in_key; |
299 | uint32_t i, t, u, v, w; | 299 | uint32_t i, t, u, v, w; |
300 | uint32_t P[AES_EXTENDED_KEY_SIZE]; | 300 | uint32_t P[AES_EXTENDED_KEY_SIZE]; |
@@ -312,8 +312,7 @@ aes_set_key(void *ctx_arg, const uint8_t *in_key, unsigned int key_len, uint32_t | |||
312 | * itself we must supply the plain key for both encryption | 312 | * itself we must supply the plain key for both encryption |
313 | * and decryption. | 313 | * and decryption. |
314 | */ | 314 | */ |
315 | ctx->E = ctx->e_data; | 315 | ctx->D = ctx->E; |
316 | ctx->D = ctx->e_data; | ||
317 | 316 | ||
318 | E_KEY[0] = le32_to_cpu(key[0]); | 317 | E_KEY[0] = le32_to_cpu(key[0]); |
319 | E_KEY[1] = le32_to_cpu(key[1]); | 318 | E_KEY[1] = le32_to_cpu(key[1]); |
@@ -414,24 +413,22 @@ static inline u8 *padlock_xcrypt_cbc(const u8 *input, u8 *output, void *key, | |||
414 | return iv; | 413 | return iv; |
415 | } | 414 | } |
416 | 415 | ||
417 | static void | 416 | static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) |
418 | aes_encrypt(void *ctx_arg, uint8_t *out, const uint8_t *in) | ||
419 | { | 417 | { |
420 | struct aes_ctx *ctx = aes_ctx(ctx_arg); | 418 | struct aes_ctx *ctx = aes_ctx(tfm); |
421 | padlock_xcrypt_ecb(in, out, ctx->E, &ctx->cword.encrypt, 1); | 419 | padlock_xcrypt_ecb(in, out, ctx->E, &ctx->cword.encrypt, 1); |
422 | } | 420 | } |
423 | 421 | ||
424 | static void | 422 | static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) |
425 | aes_decrypt(void *ctx_arg, uint8_t *out, const uint8_t *in) | ||
426 | { | 423 | { |
427 | struct aes_ctx *ctx = aes_ctx(ctx_arg); | 424 | struct aes_ctx *ctx = aes_ctx(tfm); |
428 | padlock_xcrypt_ecb(in, out, ctx->D, &ctx->cword.decrypt, 1); | 425 | padlock_xcrypt_ecb(in, out, ctx->D, &ctx->cword.decrypt, 1); |
429 | } | 426 | } |
430 | 427 | ||
431 | static unsigned int aes_encrypt_ecb(const struct cipher_desc *desc, u8 *out, | 428 | static unsigned int aes_encrypt_ecb(const struct cipher_desc *desc, u8 *out, |
432 | const u8 *in, unsigned int nbytes) | 429 | const u8 *in, unsigned int nbytes) |
433 | { | 430 | { |
434 | struct aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(desc->tfm)); | 431 | struct aes_ctx *ctx = aes_ctx(desc->tfm); |
435 | padlock_xcrypt_ecb(in, out, ctx->E, &ctx->cword.encrypt, | 432 | padlock_xcrypt_ecb(in, out, ctx->E, &ctx->cword.encrypt, |
436 | nbytes / AES_BLOCK_SIZE); | 433 | nbytes / AES_BLOCK_SIZE); |
437 | return nbytes & ~(AES_BLOCK_SIZE - 1); | 434 | return nbytes & ~(AES_BLOCK_SIZE - 1); |
@@ -440,7 +437,7 @@ static unsigned int aes_encrypt_ecb(const struct cipher_desc *desc, u8 *out, | |||
440 | static unsigned int aes_decrypt_ecb(const struct cipher_desc *desc, u8 *out, | 437 | static unsigned int aes_decrypt_ecb(const struct cipher_desc *desc, u8 *out, |
441 | const u8 *in, unsigned int nbytes) | 438 | const u8 *in, unsigned int nbytes) |
442 | { | 439 | { |
443 | struct aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(desc->tfm)); | 440 | struct aes_ctx *ctx = aes_ctx(desc->tfm); |
444 | padlock_xcrypt_ecb(in, out, ctx->D, &ctx->cword.decrypt, | 441 | padlock_xcrypt_ecb(in, out, ctx->D, &ctx->cword.decrypt, |
445 | nbytes / AES_BLOCK_SIZE); | 442 | nbytes / AES_BLOCK_SIZE); |
446 | return nbytes & ~(AES_BLOCK_SIZE - 1); | 443 | return nbytes & ~(AES_BLOCK_SIZE - 1); |
@@ -449,7 +446,7 @@ static unsigned int aes_decrypt_ecb(const struct cipher_desc *desc, u8 *out, | |||
449 | static unsigned int aes_encrypt_cbc(const struct cipher_desc *desc, u8 *out, | 446 | static unsigned int aes_encrypt_cbc(const struct cipher_desc *desc, u8 *out, |
450 | const u8 *in, unsigned int nbytes) | 447 | const u8 *in, unsigned int nbytes) |
451 | { | 448 | { |
452 | struct aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(desc->tfm)); | 449 | struct aes_ctx *ctx = aes_ctx(desc->tfm); |
453 | u8 *iv; | 450 | u8 *iv; |
454 | 451 | ||
455 | iv = padlock_xcrypt_cbc(in, out, ctx->E, desc->info, | 452 | iv = padlock_xcrypt_cbc(in, out, ctx->E, desc->info, |
@@ -462,7 +459,7 @@ static unsigned int aes_encrypt_cbc(const struct cipher_desc *desc, u8 *out, | |||
462 | static unsigned int aes_decrypt_cbc(const struct cipher_desc *desc, u8 *out, | 459 | static unsigned int aes_decrypt_cbc(const struct cipher_desc *desc, u8 *out, |
463 | const u8 *in, unsigned int nbytes) | 460 | const u8 *in, unsigned int nbytes) |
464 | { | 461 | { |
465 | struct aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(desc->tfm)); | 462 | struct aes_ctx *ctx = aes_ctx(desc->tfm); |
466 | padlock_xcrypt_cbc(in, out, ctx->D, desc->info, &ctx->cword.decrypt, | 463 | padlock_xcrypt_cbc(in, out, ctx->D, desc->info, &ctx->cword.decrypt, |
467 | nbytes / AES_BLOCK_SIZE); | 464 | nbytes / AES_BLOCK_SIZE); |
468 | return nbytes & ~(AES_BLOCK_SIZE - 1); | 465 | return nbytes & ~(AES_BLOCK_SIZE - 1); |
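The padlock hunks both shrink `struct aes_ctx` (the `e_data`/`E` indirection collapses into an in-struct `E[]` array, with `D` pointing either at `d_data` or back at `E`) and derive the aligned context from the tfm itself: `aes_ctx()` takes the raw per-tfm context address and rounds it up to `PADLOCK_ALIGNMENT`, since the generic allocator only guarantees `crypto_tfm_ctx_alignment()`. A userspace sketch of that rounding step, assuming the allocation is padded enough for the round-up to stay in bounds (names and the constant are illustrative):

/* Sketch of the aes_ctx() realignment: round the raw context address up
 * to the driver's stricter alignment. Illustrative, not the driver code. */
#include <stdint.h>

#define PADLOCK_ALIGNMENT 16

static inline uintptr_t align_ctx(uintptr_t addr, uintptr_t align)
{
	/* Equivalent of the kernel's ALIGN(addr, align) for powers of two. */
	return (addr + align - 1) & ~(align - 1);
}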
diff --git a/include/linux/crypto.h b/include/linux/crypto.h index 5a0470e36111..7f946241b879 100644 --- a/include/linux/crypto.h +++ b/include/linux/crypto.h | |||
@@ -66,7 +66,7 @@ struct crypto_tfm; | |||
66 | 66 | ||
67 | struct cipher_desc { | 67 | struct cipher_desc { |
68 | struct crypto_tfm *tfm; | 68 | struct crypto_tfm *tfm; |
69 | void (*crfn)(void *ctx, u8 *dst, const u8 *src); | 69 | void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src); |
70 | unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst, | 70 | unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst, |
71 | const u8 *src, unsigned int nbytes); | 71 | const u8 *src, unsigned int nbytes); |
72 | void *info; | 72 | void *info; |
@@ -79,10 +79,10 @@ struct cipher_desc { | |||
79 | struct cipher_alg { | 79 | struct cipher_alg { |
80 | unsigned int cia_min_keysize; | 80 | unsigned int cia_min_keysize; |
81 | unsigned int cia_max_keysize; | 81 | unsigned int cia_max_keysize; |
82 | int (*cia_setkey)(void *ctx, const u8 *key, | 82 | int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key, |
83 | unsigned int keylen, u32 *flags); | 83 | unsigned int keylen, u32 *flags); |
84 | void (*cia_encrypt)(void *ctx, u8 *dst, const u8 *src); | 84 | void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src); |
85 | void (*cia_decrypt)(void *ctx, u8 *dst, const u8 *src); | 85 | void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src); |
86 | 86 | ||
87 | unsigned int (*cia_encrypt_ecb)(const struct cipher_desc *desc, | 87 | unsigned int (*cia_encrypt_ecb)(const struct cipher_desc *desc, |
88 | u8 *dst, const u8 *src, | 88 | u8 *dst, const u8 *src, |
@@ -100,20 +100,19 @@ struct cipher_alg { | |||
100 | 100 | ||
101 | struct digest_alg { | 101 | struct digest_alg { |
102 | unsigned int dia_digestsize; | 102 | unsigned int dia_digestsize; |
103 | void (*dia_init)(void *ctx); | 103 | void (*dia_init)(struct crypto_tfm *tfm); |
104 | void (*dia_update)(void *ctx, const u8 *data, unsigned int len); | 104 | void (*dia_update)(struct crypto_tfm *tfm, const u8 *data, |
105 | void (*dia_final)(void *ctx, u8 *out); | 105 | unsigned int len); |
106 | int (*dia_setkey)(void *ctx, const u8 *key, | 106 | void (*dia_final)(struct crypto_tfm *tfm, u8 *out); |
107 | int (*dia_setkey)(struct crypto_tfm *tfm, const u8 *key, | ||
107 | unsigned int keylen, u32 *flags); | 108 | unsigned int keylen, u32 *flags); |
108 | }; | 109 | }; |
109 | 110 | ||
110 | struct compress_alg { | 111 | struct compress_alg { |
111 | int (*coa_init)(void *ctx); | 112 | int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src, |
112 | void (*coa_exit)(void *ctx); | 113 | unsigned int slen, u8 *dst, unsigned int *dlen); |
113 | int (*coa_compress)(void *ctx, const u8 *src, unsigned int slen, | 114 | int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src, |
114 | u8 *dst, unsigned int *dlen); | 115 | unsigned int slen, u8 *dst, unsigned int *dlen); |
115 | int (*coa_decompress)(void *ctx, const u8 *src, unsigned int slen, | ||
116 | u8 *dst, unsigned int *dlen); | ||
117 | }; | 116 | }; |
118 | 117 | ||
119 | #define cra_cipher cra_u.cipher | 118 | #define cra_cipher cra_u.cipher |
@@ -129,14 +128,17 @@ struct crypto_alg { | |||
129 | 128 | ||
130 | int cra_priority; | 129 | int cra_priority; |
131 | 130 | ||
132 | const char cra_name[CRYPTO_MAX_ALG_NAME]; | 131 | char cra_name[CRYPTO_MAX_ALG_NAME]; |
133 | const char cra_driver_name[CRYPTO_MAX_ALG_NAME]; | 132 | char cra_driver_name[CRYPTO_MAX_ALG_NAME]; |
134 | 133 | ||
135 | union { | 134 | union { |
136 | struct cipher_alg cipher; | 135 | struct cipher_alg cipher; |
137 | struct digest_alg digest; | 136 | struct digest_alg digest; |
138 | struct compress_alg compress; | 137 | struct compress_alg compress; |
139 | } cra_u; | 138 | } cra_u; |
139 | |||
140 | int (*cra_init)(struct crypto_tfm *tfm); | ||
141 | void (*cra_exit)(struct crypto_tfm *tfm); | ||
140 | 142 | ||
141 | struct module *cra_module; | 143 | struct module *cra_module; |
142 | }; | 144 | }; |
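Besides switching every `cia_*`/`dia_*` callback to `struct crypto_tfm *` and dropping `const` from `cra_name`/`cra_driver_name` (so replacement registration can manage the strings), the header gains per-algorithm `cra_init`/`cra_exit` hooks. A hedged sketch of how an algorithm might populate them, with invented names; presumably a non-zero return from `cra_init` fails tfm allocation:

/* Sketch of the new cra_init/cra_exit hooks; "example" is a made-up algorithm. */
#include <linux/crypto.h>

struct example_ctx {
	void *scratch;
};

static int example_init_tfm(struct crypto_tfm *tfm)
{
	struct example_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->scratch = NULL;	/* one-time per-tfm setup */
	return 0;
}

static void example_exit_tfm(struct crypto_tfm *tfm)
{
	/* per-tfm teardown, mirroring example_init_tfm() */
}

static struct crypto_alg example_alg = {
	.cra_name	= "example",
	.cra_init	= example_init_tfm,
	.cra_exit	= example_exit_tfm,
	/* ... remaining cra_* and cra_u fields as in the structures above ... */
};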