about summary refs log tree commit diff stats
diff options
context:
space:
mode:
authorHerbert Xu <herbert@gondor.apana.org.au>2005-07-06 16:53:29 -0400
committerDavid S. Miller <davem@davemloft.net>2005-07-06 16:53:29 -0400
commitfbdae9f3e7fb57c07cb0d973f113eb25da2e8ff2 (patch)
treeb20909c92c2d48ab449343967b1c365732d7e4ff
parent176c3652c544b6f8d4bb1984c58c10080f45dbf0 (diff)
[CRYPTO] Ensure cit_iv is aligned correctly
This patch ensures that cit_iv is aligned according to cra_alignmask by allocating it as part of the tfm structure. As a side effect the crypto layer will also guarantee that the tfm ctx area has enough space to be aligned by cra_alignmask. This allows us to remove the extra space reservation from the Padlock driver.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Signed-off-by: David S. Miller <davem@davemloft.net>
-rw-r--r--crypto/api.c32
-rw-r--r--crypto/cipher.c15
-rw-r--r--crypto/internal.h28
-rw-r--r--drivers/crypto/padlock-aes.c3
-rw-r--r--include/linux/crypto.h5
5 files changed, 72 insertions, 11 deletions
diff --git a/crypto/api.c b/crypto/api.c
index 0b583d24f7fa..2d8d828c0ca2 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -125,20 +125,46 @@ static void crypto_exit_ops(struct crypto_tfm *tfm)
125 } 125 }
126} 126}
127 127
128static unsigned int crypto_ctxsize(struct crypto_alg *alg, int flags)
129{
130 unsigned int len;
131
132 switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
133 default:
134 BUG();
135
136 case CRYPTO_ALG_TYPE_CIPHER:
137 len = crypto_cipher_ctxsize(alg, flags);
138 break;
139
140 case CRYPTO_ALG_TYPE_DIGEST:
141 len = crypto_digest_ctxsize(alg, flags);
142 break;
143
144 case CRYPTO_ALG_TYPE_COMPRESS:
145 len = crypto_compress_ctxsize(alg, flags);
146 break;
147 }
148
149 return len + alg->cra_alignmask;
150}
151
128struct crypto_tfm *crypto_alloc_tfm(const char *name, u32 flags) 152struct crypto_tfm *crypto_alloc_tfm(const char *name, u32 flags)
129{ 153{
130 struct crypto_tfm *tfm = NULL; 154 struct crypto_tfm *tfm = NULL;
131 struct crypto_alg *alg; 155 struct crypto_alg *alg;
156 unsigned int tfm_size;
132 157
133 alg = crypto_alg_mod_lookup(name); 158 alg = crypto_alg_mod_lookup(name);
134 if (alg == NULL) 159 if (alg == NULL)
135 goto out; 160 goto out;
136 161
137 tfm = kmalloc(sizeof(*tfm) + alg->cra_ctxsize, GFP_KERNEL); 162 tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, flags);
163 tfm = kmalloc(tfm_size, GFP_KERNEL);
138 if (tfm == NULL) 164 if (tfm == NULL)
139 goto out_put; 165 goto out_put;
140 166
141 memset(tfm, 0, sizeof(*tfm) + alg->cra_ctxsize); 167 memset(tfm, 0, tfm_size);
142 168
143 tfm->__crt_alg = alg; 169 tfm->__crt_alg = alg;
144 170
diff --git a/crypto/cipher.c b/crypto/cipher.c
index 85eb12f8e564..d3295ce14a57 100644
--- a/crypto/cipher.c
+++ b/crypto/cipher.c
@@ -41,7 +41,7 @@ static unsigned int crypt_slow(const struct cipher_desc *desc,
41 struct scatter_walk *in, 41 struct scatter_walk *in,
42 struct scatter_walk *out, unsigned int bsize) 42 struct scatter_walk *out, unsigned int bsize)
43{ 43{
44 unsigned int alignmask = desc->tfm->__crt_alg->cra_alignmask; 44 unsigned int alignmask = crypto_tfm_alg_alignmask(desc->tfm);
45 u8 buffer[bsize * 2 + alignmask]; 45 u8 buffer[bsize * 2 + alignmask];
46 u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); 46 u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
47 u8 *dst = src + bsize; 47 u8 *dst = src + bsize;
@@ -98,7 +98,7 @@ static int crypt(const struct cipher_desc *desc,
98 struct scatter_walk walk_in, walk_out; 98 struct scatter_walk walk_in, walk_out;
99 struct crypto_tfm *tfm = desc->tfm; 99 struct crypto_tfm *tfm = desc->tfm;
100 const unsigned int bsize = crypto_tfm_alg_blocksize(tfm); 100 const unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
101 unsigned int alignmask = tfm->__crt_alg->cra_alignmask; 101 unsigned int alignmask = crypto_tfm_alg_alignmask(tfm);
102 unsigned long buffer = 0; 102 unsigned long buffer = 0;
103 103
104 if (!nbytes) 104 if (!nbytes)
@@ -399,6 +399,8 @@ int crypto_init_cipher_ops(struct crypto_tfm *tfm)
399 } 399 }
400 400
401 if (ops->cit_mode == CRYPTO_TFM_MODE_CBC) { 401 if (ops->cit_mode == CRYPTO_TFM_MODE_CBC) {
402 unsigned int align;
403 unsigned long addr;
402 404
403 switch (crypto_tfm_alg_blocksize(tfm)) { 405 switch (crypto_tfm_alg_blocksize(tfm)) {
404 case 8: 406 case 8:
@@ -418,9 +420,11 @@ int crypto_init_cipher_ops(struct crypto_tfm *tfm)
418 } 420 }
419 421
420 ops->cit_ivsize = crypto_tfm_alg_blocksize(tfm); 422 ops->cit_ivsize = crypto_tfm_alg_blocksize(tfm);
421 ops->cit_iv = kmalloc(ops->cit_ivsize, GFP_KERNEL); 423 align = crypto_tfm_alg_alignmask(tfm) + 1;
422 if (ops->cit_iv == NULL) 424 addr = (unsigned long)crypto_tfm_ctx(tfm);
423 ret = -ENOMEM; 425 addr = ALIGN(addr, align);
426 addr += ALIGN(tfm->__crt_alg->cra_ctxsize, align);
427 ops->cit_iv = (void *)addr;
424 } 428 }
425 429
426out: 430out:
@@ -429,5 +433,4 @@ out:
429 433
430void crypto_exit_cipher_ops(struct crypto_tfm *tfm) 434void crypto_exit_cipher_ops(struct crypto_tfm *tfm)
431{ 435{
432 kfree(tfm->crt_cipher.cit_iv);
433} 436}
diff --git a/crypto/internal.h b/crypto/internal.h
index 83b1b6d6d92b..68612874b5fd 100644
--- a/crypto/internal.h
+++ b/crypto/internal.h
@@ -16,6 +16,7 @@
16#include <linux/highmem.h> 16#include <linux/highmem.h>
17#include <linux/interrupt.h> 17#include <linux/interrupt.h>
18#include <linux/init.h> 18#include <linux/init.h>
19#include <linux/kernel.h>
19#include <asm/kmap_types.h> 20#include <asm/kmap_types.h>
20 21
21extern enum km_type crypto_km_types[]; 22extern enum km_type crypto_km_types[];
@@ -61,6 +62,33 @@ static inline void crypto_init_proc(void)
61{ } 62{ }
62#endif 63#endif
63 64
65static inline unsigned int crypto_digest_ctxsize(struct crypto_alg *alg,
66 int flags)
67{
68 return alg->cra_ctxsize;
69}
70
71static inline unsigned int crypto_cipher_ctxsize(struct crypto_alg *alg,
72 int flags)
73{
74 unsigned int len = alg->cra_ctxsize;
75
76 switch (flags & CRYPTO_TFM_MODE_MASK) {
77 case CRYPTO_TFM_MODE_CBC:
78 len = ALIGN(len, alg->cra_alignmask + 1);
79 len += alg->cra_blocksize;
80 break;
81 }
82
83 return len;
84}
85
86static inline unsigned int crypto_compress_ctxsize(struct crypto_alg *alg,
87 int flags)
88{
89 return alg->cra_ctxsize;
90}
91
64int crypto_init_digest_flags(struct crypto_tfm *tfm, u32 flags); 92int crypto_init_digest_flags(struct crypto_tfm *tfm, u32 flags);
65int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags); 93int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags);
66int crypto_init_compress_flags(struct crypto_tfm *tfm, u32 flags); 94int crypto_init_compress_flags(struct crypto_tfm *tfm, u32 flags);
diff --git a/drivers/crypto/padlock-aes.c b/drivers/crypto/padlock-aes.c
index d2745ff4699c..c5b58fae95f2 100644
--- a/drivers/crypto/padlock-aes.c
+++ b/drivers/crypto/padlock-aes.c
@@ -465,8 +465,7 @@ static struct crypto_alg aes_alg = {
465 .cra_name = "aes", 465 .cra_name = "aes",
466 .cra_flags = CRYPTO_ALG_TYPE_CIPHER, 466 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
467 .cra_blocksize = AES_BLOCK_SIZE, 467 .cra_blocksize = AES_BLOCK_SIZE,
468 .cra_ctxsize = sizeof(struct aes_ctx) + 468 .cra_ctxsize = sizeof(struct aes_ctx),
469 PADLOCK_ALIGNMENT,
470 .cra_alignmask = PADLOCK_ALIGNMENT - 1, 469 .cra_alignmask = PADLOCK_ALIGNMENT - 1,
471 .cra_module = THIS_MODULE, 470 .cra_module = THIS_MODULE,
472 .cra_list = LIST_HEAD_INIT(aes_alg.cra_list), 471 .cra_list = LIST_HEAD_INIT(aes_alg.cra_list),
diff --git a/include/linux/crypto.h b/include/linux/crypto.h
index ac9d49beecd3..5e2bcc636a02 100644
--- a/include/linux/crypto.h
+++ b/include/linux/crypto.h
@@ -288,6 +288,11 @@ static inline unsigned int crypto_tfm_alg_digestsize(struct crypto_tfm *tfm)
288 return tfm->__crt_alg->cra_digest.dia_digestsize; 288 return tfm->__crt_alg->cra_digest.dia_digestsize;
289} 289}
290 290
291static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
292{
293 return tfm->__crt_alg->cra_alignmask;
294}
295
291static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm) 296static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
292{ 297{
293 return (void *)&tfm[1]; 298 return (void *)&tfm[1];