author     Herbert Xu <herbert@gondor.apana.org.au>   2006-05-16 08:09:29 -0400
committer  Herbert Xu <herbert@gondor.apana.org.au>   2006-06-26 03:34:39 -0400
commit     6c2bb98bc33ae33c7a33a133a4cd5a06395fece5 (patch)
tree       96684cd2c473cd05d651ce1fa3dd72b1b4b19b09 /arch
parent     43600106e32809a4dead79fec67a63e9860e3d5d (diff)
[CRYPTO] all: Pass tfm instead of ctx to algorithms
Up until now algorithms have been happy to get a context pointer since
they know everything that's in the tfm already (e.g., alignment, block
size).
However, once we have parameterised algorithms, such information will
be specific to each tfm. So the algorithm API needs to be changed to
pass the tfm structure instead of the context pointer.
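To illustrate the API change, here is a minimal before/after sketch of a setkey callback, modelled on the i386 aes.c hunks below (the aes_ctx type and the elided key-expansion body are just stand-ins, not part of this patch):

	/* Before: the callback receives the private context directly. */
	static int aes_set_key(void *ctx_arg, const u8 *in_key,
			       unsigned int key_len, u32 *flags)
	{
		struct aes_ctx *ctx = ctx_arg;
		/* ... expand the key schedule into ctx ... */
		return 0;
	}

	/* After: the callback receives the tfm and derives the context. */
	static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			       unsigned int key_len, u32 *flags)
	{
		struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
		/* ... expand the key schedule into ctx ... */
		return 0;
	}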
This patch is basically a text substitution. The only tricky bit is the
assembly routines, which need to pick up the context pointer offset
through asm-offsets.h.
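For example, on i386 the generated constant and its use look like this (taken from the asm-offsets.c and aes-i586-asm.S hunks below):

	/* arch/i386/kernel/asm-offsets.c: emit the offset of the private
	 * context within struct crypto_tfm as an assembler constant. */
	OFFSET(crypto_tfm_ctx_offset, crypto_tfm, __crt_ctx);

	/* arch/i386/crypto/aes-i586-asm.S: key schedule fields are now
	 * addressed relative to the tfm pointer via that constant. */
	#define ekey (crypto_tfm_ctx_offset + 0)
	#define nrnd (crypto_tfm_ctx_offset + 256)
	#define dkey (crypto_tfm_ctx_offset + 260)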
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'arch')
-rw-r--r--  arch/i386/crypto/aes-i586-asm.S     | 28
-rw-r--r--  arch/i386/crypto/aes.c              | 10
-rw-r--r--  arch/i386/kernel/asm-offsets.c      |  3
-rw-r--r--  arch/s390/crypto/aes_s390.c         | 14
-rw-r--r--  arch/s390/crypto/des_s390.c         | 42
-rw-r--r--  arch/s390/crypto/sha1_s390.c        | 15
-rw-r--r--  arch/s390/crypto/sha256_s390.c      | 13
-rw-r--r--  arch/x86_64/crypto/aes-x86_64-asm.S | 18
-rw-r--r--  arch/x86_64/crypto/aes.c            | 10
-rw-r--r--  arch/x86_64/kernel/asm-offsets.c    |  3
10 files changed, 83 insertions, 73 deletions
diff --git a/arch/i386/crypto/aes-i586-asm.S b/arch/i386/crypto/aes-i586-asm.S
index 2851f7fe51e6..f942f0c8f630 100644
--- a/arch/i386/crypto/aes-i586-asm.S
+++ b/arch/i386/crypto/aes-i586-asm.S
@@ -36,19 +36,19 @@
 .file "aes-i586-asm.S"
 .text
 
-#define tlen 1024   // length of each of 4 'xor' arrays (256 32-bit words)
+#include <asm/asm-offsets.h>
 
-// offsets to parameters with one register pushed onto stack
+#define tlen 1024   // length of each of 4 'xor' arrays (256 32-bit words)
 
-#define in_blk 16   // input byte array address parameter
-#define out_blk 12  // output byte array address parameter
-#define ctx      8  // AES context structure
-
-// offsets in context structure
+/* offsets to parameters with one register pushed onto stack */
+#define tfm 8
+#define out_blk 12
+#define in_blk 16
 
-#define ekey     0  // encryption key schedule base address
-#define nrnd   256  // number of rounds
-#define dkey   260  // decryption key schedule base address
+/* offsets in crypto_tfm structure */
+#define ekey (crypto_tfm_ctx_offset + 0)
+#define nrnd (crypto_tfm_ctx_offset + 256)
+#define dkey (crypto_tfm_ctx_offset + 260)
 
 // register mapping for encrypt and decrypt subroutines
 
@@ -217,7 +217,7 @@
 	do_col (table, r5,r0,r1,r4, r2,r3); /* idx=r5 */
 
 // AES (Rijndael) Encryption Subroutine
-/* void aes_enc_blk(void *ctx, u8 *out_blk, const u8 *in_blk) */
+/* void aes_enc_blk(struct crypto_tfm *tfm, u8 *out_blk, const u8 *in_blk) */
 
 .global  aes_enc_blk
 
@@ -228,7 +228,7 @@
 
 aes_enc_blk:
 	push    %ebp
-	mov     ctx(%esp),%ebp      // pointer to context
+	mov     tfm(%esp),%ebp
 
 // CAUTION: the order and the values used in these assigns
 // rely on the register mappings
@@ -293,7 +293,7 @@ aes_enc_blk:
 	ret
 
 // AES (Rijndael) Decryption Subroutine
-/* void aes_dec_blk(void *ctx, u8 *out_blk, const u8 *in_blk) */
+/* void aes_dec_blk(struct crypto_tfm *tfm, u8 *out_blk, const u8 *in_blk) */
 
 .global  aes_dec_blk
 
@@ -304,7 +304,7 @@ aes_enc_blk:
 
 aes_dec_blk:
 	push    %ebp
-	mov     ctx(%esp),%ebp      // pointer to context
+	mov     tfm(%esp),%ebp
 
 // CAUTION: the order and the values used in these assigns
 // rely on the register mappings
diff --git a/arch/i386/crypto/aes.c b/arch/i386/crypto/aes.c
index a0e033510a3b..b9c7d99160f1 100644
--- a/arch/i386/crypto/aes.c
+++ b/arch/i386/crypto/aes.c
@@ -45,8 +45,8 @@
 #include <linux/crypto.h>
 #include <linux/linkage.h>
 
-asmlinkage void aes_enc_blk(void *ctx, u8 *dst, const u8 *src);
-asmlinkage void aes_dec_blk(void *ctx, u8 *dst, const u8 *src);
+asmlinkage void aes_enc_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
+asmlinkage void aes_dec_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
 
 #define AES_MIN_KEY_SIZE	16
 #define AES_MAX_KEY_SIZE	32
@@ -378,12 +378,12 @@ static void gen_tabs(void)
 	k[8*(i)+11] = ss[3]; \
 }
 
-static int
-aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len, u32 *flags)
+static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+		       unsigned int key_len, u32 *flags)
 {
 	int i;
 	u32 ss[8];
-	struct aes_ctx *ctx = ctx_arg;
+	struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
 	const __le32 *key = (const __le32 *)in_key;
 
 	/* encryption schedule */
diff --git a/arch/i386/kernel/asm-offsets.c b/arch/i386/kernel/asm-offsets.c
index 36d66e2077d0..1c3a809e6421 100644
--- a/arch/i386/kernel/asm-offsets.c
+++ b/arch/i386/kernel/asm-offsets.c
@@ -4,6 +4,7 @@
  * to extract and format the required data.
  */
 
+#include <linux/crypto.h>
 #include <linux/sched.h>
 #include <linux/signal.h>
 #include <linux/personality.h>
@@ -69,4 +70,6 @@ void foo(void)
 
 	DEFINE(PAGE_SIZE_asm, PAGE_SIZE);
 	DEFINE(VSYSCALL_BASE, __fix_to_virt(FIX_VSYSCALL));
+
+	OFFSET(crypto_tfm_ctx_offset, crypto_tfm, __crt_ctx);
 }
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c
index c5ca2dc5d428..5713c7e5bd16 100644
--- a/arch/s390/crypto/aes_s390.c
+++ b/arch/s390/crypto/aes_s390.c
@@ -37,10 +37,10 @@ struct s390_aes_ctx {
 	int key_len;
 };
 
-static int aes_set_key(void *ctx, const u8 *in_key, unsigned int key_len,
-		       u32 *flags)
+static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+		       unsigned int key_len, u32 *flags)
 {
-	struct s390_aes_ctx *sctx = ctx;
+	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 
 	switch (key_len) {
 	case 16:
@@ -70,9 +70,9 @@ fail:
 	return -EINVAL;
 }
 
-static void aes_encrypt(void *ctx, u8 *out, const u8 *in)
+static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-	const struct s390_aes_ctx *sctx = ctx;
+	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 
 	switch (sctx->key_len) {
 	case 16:
@@ -90,9 +90,9 @@ static void aes_encrypt(void *ctx, u8 *out, const u8 *in)
 	}
 }
 
-static void aes_decrypt(void *ctx, u8 *out, const u8 *in)
+static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-	const struct s390_aes_ctx *sctx = ctx;
+	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 
 	switch (sctx->key_len) {
 	case 16:
diff --git a/arch/s390/crypto/des_s390.c b/arch/s390/crypto/des_s390.c
index e3c37aa0a199..b3f7496a79b4 100644
--- a/arch/s390/crypto/des_s390.c
+++ b/arch/s390/crypto/des_s390.c
@@ -44,10 +44,10 @@ struct crypt_s390_des3_192_ctx {
 	u8 key[DES3_192_KEY_SIZE];
 };
 
-static int des_setkey(void *ctx, const u8 *key, unsigned int keylen,
-		      u32 *flags)
+static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
+		      unsigned int keylen, u32 *flags)
 {
-	struct crypt_s390_des_ctx *dctx = ctx;
+	struct crypt_s390_des_ctx *dctx = crypto_tfm_ctx(tfm);
 	int ret;
 
 	/* test if key is valid (not a weak key) */
@@ -57,16 +57,16 @@ static int des_setkey(void *ctx, const u8 *key, unsigned int keylen,
 	return ret;
 }
 
-static void des_encrypt(void *ctx, u8 *out, const u8 *in)
+static void des_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-	struct crypt_s390_des_ctx *dctx = ctx;
+	struct crypt_s390_des_ctx *dctx = crypto_tfm_ctx(tfm);
 
 	crypt_s390_km(KM_DEA_ENCRYPT, dctx->key, out, in, DES_BLOCK_SIZE);
 }
 
-static void des_decrypt(void *ctx, u8 *out, const u8 *in)
+static void des_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
-	struct crypt_s390_des_ctx *dctx = ctx;
+	struct crypt_s390_des_ctx *dctx = crypto_tfm_ctx(tfm);
 
 	crypt_s390_km(KM_DEA_DECRYPT, dctx->key, out, in, DES_BLOCK_SIZE);
 }
@@ -166,11 +166,11 @@ static struct crypto_alg des_alg = {
  *   Implementers MUST reject keys that exhibit this property.
  *
  */
-static int des3_128_setkey(void *ctx, const u8 *key, unsigned int keylen,
-			   u32 *flags)
+static int des3_128_setkey(struct crypto_tfm *tfm, const u8 *key,
+			   unsigned int keylen, u32 *flags)
 {
 	int i, ret;
-	struct crypt_s390_des3_128_ctx *dctx = ctx;
+	struct crypt_s390_des3_128_ctx *dctx = crypto_tfm_ctx(tfm);
 	const u8* temp_key = key;
 
 	if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE))) {
@@ -186,17 +186,17 @@ static int des3_128_setkey(void *ctx, const u8 *key, unsigned int keylen,
 	return 0;
 }
 
-static void des3_128_encrypt(void *ctx, u8 *dst, const u8 *src)
+static void des3_128_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-	struct crypt_s390_des3_128_ctx *dctx = ctx;
+	struct crypt_s390_des3_128_ctx *dctx = crypto_tfm_ctx(tfm);
 
 	crypt_s390_km(KM_TDEA_128_ENCRYPT, dctx->key, dst, (void*)src,
 		      DES3_128_BLOCK_SIZE);
 }
 
-static void des3_128_decrypt(void *ctx, u8 *dst, const u8 *src)
+static void des3_128_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-	struct crypt_s390_des3_128_ctx *dctx = ctx;
+	struct crypt_s390_des3_128_ctx *dctx = crypto_tfm_ctx(tfm);
 
 	crypt_s390_km(KM_TDEA_128_DECRYPT, dctx->key, dst, (void*)src,
 		      DES3_128_BLOCK_SIZE);
@@ -302,11 +302,11 @@ static struct crypto_alg des3_128_alg = {
  *   property.
  *
  */
-static int des3_192_setkey(void *ctx, const u8 *key, unsigned int keylen,
-			   u32 *flags)
+static int des3_192_setkey(struct crypto_tfm *tfm, const u8 *key,
+			   unsigned int keylen, u32 *flags)
 {
 	int i, ret;
-	struct crypt_s390_des3_192_ctx *dctx = ctx;
+	struct crypt_s390_des3_192_ctx *dctx = crypto_tfm_ctx(tfm);
 	const u8* temp_key = key;
 
 	if (!(memcmp(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) &&
@@ -325,17 +325,17 @@ static int des3_192_setkey(void *ctx, const u8 *key, unsigned int keylen,
 	return 0;
 }
 
-static void des3_192_encrypt(void *ctx, u8 *dst, const u8 *src)
+static void des3_192_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-	struct crypt_s390_des3_192_ctx *dctx = ctx;
+	struct crypt_s390_des3_192_ctx *dctx = crypto_tfm_ctx(tfm);
 
 	crypt_s390_km(KM_TDEA_192_ENCRYPT, dctx->key, dst, (void*)src,
 		      DES3_192_BLOCK_SIZE);
 }
 
-static void des3_192_decrypt(void *ctx, u8 *dst, const u8 *src)
+static void des3_192_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
 {
-	struct crypt_s390_des3_192_ctx *dctx = ctx;
+	struct crypt_s390_des3_192_ctx *dctx = crypto_tfm_ctx(tfm);
 
 	crypt_s390_km(KM_TDEA_192_DECRYPT, dctx->key, dst, (void*)src,
 		      DES3_192_BLOCK_SIZE);
diff --git a/arch/s390/crypto/sha1_s390.c b/arch/s390/crypto/sha1_s390.c
index 36bb5346a8c4..9d34a35b1aa5 100644
--- a/arch/s390/crypto/sha1_s390.c
+++ b/arch/s390/crypto/sha1_s390.c
@@ -40,9 +40,9 @@ struct crypt_s390_sha1_ctx {
 	u8 buffer[2 * SHA1_BLOCK_SIZE];
 };
 
-static void sha1_init(void *ctx_arg)
+static void sha1_init(struct crypto_tfm *tfm)
 {
-	struct crypt_s390_sha1_ctx *ctx = ctx_arg;
+	struct crypt_s390_sha1_ctx *ctx = crypto_tfm_ctx(tfm);
 	static const u32 initstate[5] = {
 		0x67452301,
 		0xEFCDAB89,
@@ -56,13 +56,13 @@ static void sha1_init(void *ctx_arg)
 	ctx->buf_len = 0;
 }
 
-static void
-sha1_update(void *ctx, const u8 *data, unsigned int len)
+static void sha1_update(struct crypto_tfm *tfm, const u8 *data,
+			unsigned int len)
 {
 	struct crypt_s390_sha1_ctx *sctx;
 	long imd_len;
 
-	sctx = ctx;
+	sctx = crypto_tfm_ctx(tfm);
 	sctx->count += len * 8; //message bit length
 
 	//anything in buffer yet? -> must be completed
@@ -111,10 +111,9 @@ pad_message(struct crypt_s390_sha1_ctx* sctx)
 }
 
 /* Add padding and return the message digest. */
-static void
-sha1_final(void* ctx, u8 *out)
+static void sha1_final(struct crypto_tfm *tfm, u8 *out)
 {
-	struct crypt_s390_sha1_ctx *sctx = ctx;
+	struct crypt_s390_sha1_ctx *sctx = crypto_tfm_ctx(tfm);
 
 	//must perform manual padding
 	pad_message(sctx);
diff --git a/arch/s390/crypto/sha256_s390.c b/arch/s390/crypto/sha256_s390.c
index 2c76e7bee41c..f573df30f31d 100644
--- a/arch/s390/crypto/sha256_s390.c
+++ b/arch/s390/crypto/sha256_s390.c
@@ -31,9 +31,9 @@ struct s390_sha256_ctx {
 	u8 buf[2 * SHA256_BLOCK_SIZE];
 };
 
-static void sha256_init(void *ctx)
+static void sha256_init(struct crypto_tfm *tfm)
 {
-	struct s390_sha256_ctx *sctx = ctx;
+	struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);
 
 	sctx->state[0] = 0x6a09e667;
 	sctx->state[1] = 0xbb67ae85;
@@ -46,9 +46,10 @@ static void sha256_init(void *ctx)
 	sctx->count = 0;
 }
 
-static void sha256_update(void *ctx, const u8 *data, unsigned int len)
+static void sha256_update(struct crypto_tfm *tfm, const u8 *data,
+			  unsigned int len)
 {
-	struct s390_sha256_ctx *sctx = ctx;
+	struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);
 	unsigned int index;
 	int ret;
 
@@ -107,9 +108,9 @@ static void pad_message(struct s390_sha256_ctx* sctx)
 }
 
 /* Add padding and return the message digest */
-static void sha256_final(void* ctx, u8 *out)
+static void sha256_final(struct crypto_tfm *tfm, u8 *out)
 {
-	struct s390_sha256_ctx *sctx = ctx;
+	struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);
 
 	/* must perform manual padding */
 	pad_message(sctx);
diff --git a/arch/x86_64/crypto/aes-x86_64-asm.S b/arch/x86_64/crypto/aes-x86_64-asm.S
index 483cbb23ab8d..f3ba643e144d 100644
--- a/arch/x86_64/crypto/aes-x86_64-asm.S
+++ b/arch/x86_64/crypto/aes-x86_64-asm.S
@@ -15,6 +15,10 @@
 
 .text
 
+#include <asm/asm-offsets.h>
+
+#define BASE crypto_tfm_ctx_offset
+
 #define R1	%rax
 #define R1E	%eax
 #define R1X	%ax
@@ -46,19 +50,19 @@
 #define R10	%r10
 #define R11	%r11
 
-#define prologue(FUNC,BASE,B128,B192,r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11) \
+#define prologue(FUNC,KEY,B128,B192,r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11) \
 	.global	FUNC;			\
 	.type	FUNC,@function;		\
 	.align	8;			\
 FUNC:	movq	r1,r2;			\
 	movq	r3,r4;			\
-	leaq	BASE+52(r8),r9;		\
+	leaq	BASE+KEY+52(r8),r9;	\
 	movq	r10,r11;		\
 	movl	(r7),r5 ## E;		\
 	movl	4(r7),r1 ## E;		\
 	movl	8(r7),r6 ## E;		\
 	movl	12(r7),r7 ## E;		\
-	movl	(r8),r10 ## E;		\
+	movl	BASE(r8),r10 ## E;	\
 	xorl	-48(r9),r5 ## E;	\
 	xorl	-44(r9),r1 ## E;	\
 	xorl	-40(r9),r6 ## E;	\
@@ -128,8 +132,8 @@ FUNC:	movq	r1,r2;			\
 	movl	r3 ## E,r1 ## E;	\
 	movl	r4 ## E,r2 ## E;
 
-#define entry(FUNC,BASE,B128,B192) \
-	prologue(FUNC,BASE,B128,B192,R2,R8,R7,R9,R1,R3,R4,R6,R10,R5,R11)
+#define entry(FUNC,KEY,B128,B192) \
+	prologue(FUNC,KEY,B128,B192,R2,R8,R7,R9,R1,R3,R4,R6,R10,R5,R11)
 
 #define return epilogue(R8,R2,R9,R7,R5,R6,R3,R4,R11)
 
@@ -147,7 +151,7 @@ FUNC:	movq	r1,r2;			\
 #define decrypt_final(TAB,OFFSET) \
 	round(TAB,OFFSET,R2,R1,R4,R3,R6,R5,R7,R10,R5,R6,R3,R4)
 
-/* void aes_encrypt(void *ctx, u8 *out, const u8 *in) */
+/* void aes_encrypt(stuct crypto_tfm *tfm, u8 *out, const u8 *in) */
 
 entry(aes_encrypt,0,enc128,enc192)
 	encrypt_round(aes_ft_tab,-96)
@@ -166,7 +170,7 @@ enc128:	encrypt_round(aes_ft_tab,-32)
 	encrypt_final(aes_fl_tab,112)
 	return
 
-/* void aes_decrypt(void *ctx, u8 *out, const u8 *in) */
+/* void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) */
 
 entry(aes_decrypt,240,dec128,dec192)
 	decrypt_round(aes_it_tab,-96)
diff --git a/arch/x86_64/crypto/aes.c b/arch/x86_64/crypto/aes.c
index 6f77e7700d32..d6f8e0463b5d 100644
--- a/arch/x86_64/crypto/aes.c
+++ b/arch/x86_64/crypto/aes.c
@@ -227,10 +227,10 @@ static void __init gen_tabs(void)
 	t ^= E_KEY[8 * i + 7]; E_KEY[8 * i + 15] = t; \
 }
 
-static int aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len,
-		       u32 *flags)
+static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+		       unsigned int key_len, u32 *flags)
 {
-	struct aes_ctx *ctx = ctx_arg;
+	struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
 	const __le32 *key = (const __le32 *)in_key;
 	u32 i, j, t, u, v, w;
 
@@ -283,8 +283,8 @@ static int aes_set_key(void *ctx_arg, const u8 *in_key, unsigned int key_len,
 	return 0;
 }
 
-extern void aes_encrypt(void *ctx_arg, u8 *out, const u8 *in);
-extern void aes_decrypt(void *ctx_arg, u8 *out, const u8 *in);
+extern void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in);
+extern void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in);
 
 static struct crypto_alg aes_alg = {
 	.cra_name	= "aes",
diff --git a/arch/x86_64/kernel/asm-offsets.c b/arch/x86_64/kernel/asm-offsets.c
index 38834bbbae11..96687e2beb2c 100644
--- a/arch/x86_64/kernel/asm-offsets.c
+++ b/arch/x86_64/kernel/asm-offsets.c
@@ -4,6 +4,7 @@
  * and format the required data.
  */
 
+#include <linux/crypto.h>
 #include <linux/sched.h>
 #include <linux/stddef.h>
 #include <linux/errno.h>
@@ -68,5 +69,7 @@ int main(void)
 	DEFINE(pbe_next, offsetof(struct pbe, next));
 	BLANK();
 	DEFINE(TSS_ist, offsetof(struct tss_struct, ist));
+	BLANK();
+	DEFINE(crypto_tfm_ctx_offset, offsetof(struct crypto_tfm, __crt_ctx));
 	return 0;
 }