author		Herbert Xu <herbert@gondor.apana.org.au>	2005-07-06 16:53:29 -0400
committer	David S. Miller <davem@davemloft.net>	2005-07-06 16:53:29 -0400
commit		fbdae9f3e7fb57c07cb0d973f113eb25da2e8ff2 (patch)
tree		b20909c92c2d48ab449343967b1c365732d7e4ff /crypto
parent		176c3652c544b6f8d4bb1984c58c10080f45dbf0 (diff)
[CRYPTO] Ensure cit_iv is aligned correctly
This patch ensures that cit_iv is aligned according to cra_alignmask
by allocating it as part of the tfm structure. As a side effect the
crypto layer will also guarantee that the tfm ctx area has enough space
to be aligned by cra_alignmask. This allows us to remove the extra
space reservation from the Padlock driver.
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Signed-off-by: David S. Miller <davem@davemloft.net>
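
To make the new layout concrete, here is a minimal userspace sketch, assuming a simplified algorithm descriptor; toy_alg, toy_tfm and ALIGN_UP are illustrative stand-ins for crypto_alg, crypto_tfm and the kernel's ALIGN(), not the actual kernel code. The IV is carved out of the same allocation as the tfm, immediately after the alignmask-padded ctx area:

/* Minimal sketch of the layout this patch produces (illustrative names). */
#include <stdint.h>
#include <stdlib.h>

#define ALIGN_UP(x, a)	(((uintptr_t)(x) + (a) - 1) & ~((uintptr_t)(a) - 1))

struct toy_alg {
	unsigned int ctxsize;	/* like cra_ctxsize   */
	unsigned int blocksize;	/* like cra_blocksize */
	unsigned int alignmask;	/* like cra_alignmask: one less than a power of two */
};

struct toy_tfm {
	struct toy_alg *alg;
	void *iv;		/* like cit_iv */
	/* ctx area follows the struct in the same allocation */
};

/* One allocation for tfm + ctx + IV, like crypto_alloc_tfm() now does. */
static struct toy_tfm *toy_alloc(struct toy_alg *alg)
{
	/* ctx is padded to the alignment; the IV follows it */
	size_t ctx_len = ALIGN_UP(alg->ctxsize, alg->alignmask + 1);
	size_t size = sizeof(struct toy_tfm) + ctx_len + alg->blocksize +
		      alg->alignmask;	/* slack so the ctx start can be aligned */
	struct toy_tfm *tfm = calloc(1, size);
	uintptr_t ctx;

	if (!tfm)
		return NULL;
	tfm->alg = alg;

	/* align the start of the ctx area, then place the IV right after it */
	ctx = ALIGN_UP(tfm + 1, alg->alignmask + 1);
	tfm->iv = (void *)(ctx + ctx_len);
	return tfm;
}

The cra_alignmask bytes of slack are what let both the ctx area and cit_iv be aligned in place without a separate kmalloc() for the IV.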
Diffstat (limited to 'crypto')
-rw-r--r--	crypto/api.c		| 32
-rw-r--r--	crypto/cipher.c		| 15
-rw-r--r--	crypto/internal.h	| 28
3 files changed, 66 insertions, 9 deletions
diff --git a/crypto/api.c b/crypto/api.c
index 0b583d24f7fa..2d8d828c0ca2 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -125,20 +125,46 @@ static void crypto_exit_ops(struct crypto_tfm *tfm)
 	}
 }
 
+static unsigned int crypto_ctxsize(struct crypto_alg *alg, int flags)
+{
+	unsigned int len;
+
+	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
+	default:
+		BUG();
+
+	case CRYPTO_ALG_TYPE_CIPHER:
+		len = crypto_cipher_ctxsize(alg, flags);
+		break;
+
+	case CRYPTO_ALG_TYPE_DIGEST:
+		len = crypto_digest_ctxsize(alg, flags);
+		break;
+
+	case CRYPTO_ALG_TYPE_COMPRESS:
+		len = crypto_compress_ctxsize(alg, flags);
+		break;
+	}
+
+	return len + alg->cra_alignmask;
+}
+
 struct crypto_tfm *crypto_alloc_tfm(const char *name, u32 flags)
 {
 	struct crypto_tfm *tfm = NULL;
 	struct crypto_alg *alg;
+	unsigned int tfm_size;
 
 	alg = crypto_alg_mod_lookup(name);
 	if (alg == NULL)
 		goto out;
 
-	tfm = kmalloc(sizeof(*tfm) + alg->cra_ctxsize, GFP_KERNEL);
+	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, flags);
+	tfm = kmalloc(tfm_size, GFP_KERNEL);
 	if (tfm == NULL)
 		goto out_put;
 
-	memset(tfm, 0, sizeof(*tfm) + alg->cra_ctxsize);
+	memset(tfm, 0, tfm_size);
 
 	tfm->__crt_alg = alg;
 
diff --git a/crypto/cipher.c b/crypto/cipher.c
index 85eb12f8e564..d3295ce14a57 100644
--- a/crypto/cipher.c
+++ b/crypto/cipher.c
@@ -41,7 +41,7 @@ static unsigned int crypt_slow(const struct cipher_desc *desc,
 			       struct scatter_walk *in,
 			       struct scatter_walk *out, unsigned int bsize)
 {
-	unsigned int alignmask = desc->tfm->__crt_alg->cra_alignmask;
+	unsigned int alignmask = crypto_tfm_alg_alignmask(desc->tfm);
 	u8 buffer[bsize * 2 + alignmask];
 	u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
 	u8 *dst = src + bsize;
@@ -98,7 +98,7 @@ static int crypt(const struct cipher_desc *desc,
 	struct scatter_walk walk_in, walk_out;
 	struct crypto_tfm *tfm = desc->tfm;
 	const unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
-	unsigned int alignmask = tfm->__crt_alg->cra_alignmask;
+	unsigned int alignmask = crypto_tfm_alg_alignmask(tfm);
 	unsigned long buffer = 0;
 
 	if (!nbytes)
@@ -399,6 +399,8 @@ int crypto_init_cipher_ops(struct crypto_tfm *tfm)
 	}
 
 	if (ops->cit_mode == CRYPTO_TFM_MODE_CBC) {
+		unsigned int align;
+		unsigned long addr;
 
 		switch (crypto_tfm_alg_blocksize(tfm)) {
 		case 8:
@@ -418,9 +420,11 @@ int crypto_init_cipher_ops(struct crypto_tfm *tfm)
 		}
 
 		ops->cit_ivsize = crypto_tfm_alg_blocksize(tfm);
-		ops->cit_iv = kmalloc(ops->cit_ivsize, GFP_KERNEL);
-		if (ops->cit_iv == NULL)
-			ret = -ENOMEM;
+		align = crypto_tfm_alg_alignmask(tfm) + 1;
+		addr = (unsigned long)crypto_tfm_ctx(tfm);
+		addr = ALIGN(addr, align);
+		addr += ALIGN(tfm->__crt_alg->cra_ctxsize, align);
+		ops->cit_iv = (void *)addr;
 	}
 
 out:
@@ -429,5 +433,4 @@ out:
 
 void crypto_exit_cipher_ops(struct crypto_tfm *tfm)
 {
-	kfree(tfm->crt_cipher.cit_iv);
 }
diff --git a/crypto/internal.h b/crypto/internal.h
index 83b1b6d6d92b..68612874b5fd 100644
--- a/crypto/internal.h
+++ b/crypto/internal.h
@@ -16,6 +16,7 @@
 #include <linux/highmem.h>
 #include <linux/interrupt.h>
 #include <linux/init.h>
+#include <linux/kernel.h>
 #include <asm/kmap_types.h>
 
 extern enum km_type crypto_km_types[];
@@ -61,6 +62,33 @@ static inline void crypto_init_proc(void)
 { }
 #endif
 
+static inline unsigned int crypto_digest_ctxsize(struct crypto_alg *alg,
+						 int flags)
+{
+	return alg->cra_ctxsize;
+}
+
+static inline unsigned int crypto_cipher_ctxsize(struct crypto_alg *alg,
+						 int flags)
+{
+	unsigned int len = alg->cra_ctxsize;
+
+	switch (flags & CRYPTO_TFM_MODE_MASK) {
+	case CRYPTO_TFM_MODE_CBC:
+		len = ALIGN(len, alg->cra_alignmask + 1);
+		len += alg->cra_blocksize;
+		break;
+	}
+
+	return len;
+}
+
+static inline unsigned int crypto_compress_ctxsize(struct crypto_alg *alg,
+						   int flags)
+{
+	return alg->cra_ctxsize;
+}
+
 int crypto_init_digest_flags(struct crypto_tfm *tfm, u32 flags);
 int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags);
 int crypto_init_compress_flags(struct crypto_tfm *tfm, u32 flags);
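
For a driver that declares a non-zero cra_alignmask (the Padlock case mentioned in the commit message), the practical consequence is that crypto_tfm_ctx() now comes with enough slack to be aligned in place, so the driver no longer needs to pad cra_ctxsize itself. A hedged sketch of that pattern, using a made-up context struct (example_ctx and example_aligned_ctx are illustrative, not driver code):

/* Illustrative only: example_ctx stands in for a driver context that
 * needs stricter alignment than kmalloc() guarantees. */
struct example_ctx {
	u8 key[32];
};

static inline struct example_ctx *example_aligned_ctx(struct crypto_tfm *tfm)
{
	unsigned long addr = (unsigned long)crypto_tfm_ctx(tfm);
	unsigned long align = crypto_tfm_alg_alignmask(tfm) + 1;

	/* Safe because crypto_ctxsize() reserves cra_alignmask extra bytes. */
	return (struct example_ctx *)ALIGN(addr, align);
}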