author		Herbert Xu <herbert@gondor.apana.org.au>	2005-07-06 16:52:09 -0400
committer	David S. Miller <davem@davemloft.net>	2005-07-06 16:52:09 -0400
commit		95477377995aefa2ec1654a9a3777bd57ea99146 (patch)
tree		7aa4d6173de13c81c2fa0e4d2f9e0de22e141b6a
parent		40725181b74be6b0e3bdc8c05bd1e0b9873ec5cc (diff)
[CRYPTO] Add alignmask for low-level cipher implementations
The VIA Padlock device requires the input and output buffers to be aligned on 16-byte boundaries. This patch adds the alignmask attribute for low-level cipher implementations to indicate their alignment requirements.

The mid-level crypt() function will copy the input/output buffers if they are not aligned correctly before they are passed to the low-level implementation.

Strictly speaking, some of the software implementations require the buffers to be aligned on 4-byte boundaries as they do 32-bit loads. However, it is not clear whether it is better to copy the buffers or pay the penalty for unaligned loads/stores.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Signed-off-by: David S. Miller <davem@davemloft.net>
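To make the new attribute concrete: an implementation declares its requirement by setting cra_alignmask to the required alignment minus one, so Padlock's 16-byte requirement becomes a mask of 15. A minimal sketch of such a declaration follows; the identifier padlock_aes_alg and the elided fields are illustrative rather than taken from this patch, but cra_alignmask is the field it adds:

	/* Illustrative sketch, not from the patch: a low-level cipher with a
	 * 16-byte alignment requirement advertises it as alignment - 1. */
	static struct crypto_alg padlock_aes_alg = {
		.cra_name      = "aes",
		.cra_flags     = CRYPTO_ALG_TYPE_CIPHER,
		.cra_blocksize = 16,		/* AES block size */
		.cra_alignmask = 15,		/* low four address bits must be 0 */
		/* ... cra_ctxsize, cra_module, cra_u and the rest elided ... */
	};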
-rw-r--r--	crypto/api.c		6
-rw-r--r--	crypto/cipher.c		43
-rw-r--r--	crypto/scatterwalk.h	6
-rw-r--r--	include/linux/crypto.h	1
4 files changed, 49 insertions(+), 7 deletions(-)
diff --git a/crypto/api.c b/crypto/api.c
index 394169a8577d..f55856b21992 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -168,6 +168,12 @@ int crypto_register_alg(struct crypto_alg *alg)
 {
 	int ret = 0;
 	struct crypto_alg *q;
+
+	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
+		return -EINVAL;
+
+	if (alg->cra_alignmask > PAGE_SIZE)
+		return -EINVAL;
 
 	down_write(&crypto_alg_sem);
 
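crypto_register_alg() now rejects malformed masks up front. The first test exploits the shape of a valid mask: alignment is a power of two, so the mask 2^n - 1 is a contiguous run of low ones, adding one carries through them all, and the AND comes out zero; any other bit pattern leaves a set bit behind. A standalone userspace check of the same expression (a sketch, not patch code):

	#include <assert.h>

	/* Same test as crypto_register_alg(): a valid alignmask is one less
	 * than a power of two, i.e. binary 00...011...1. */
	static int alignmask_valid(unsigned int mask)
	{
		return (mask & (mask + 1)) == 0;
	}

	int main(void)
	{
		assert(alignmask_valid(0));	/* no alignment requirement */
		assert(alignmask_valid(3));	/* 4-byte alignment */
		assert(alignmask_valid(15));	/* 16-byte alignment (Padlock) */
		assert(!alignmask_valid(16));	/* a power of two itself: invalid */
		assert(!alignmask_valid(5));	/* 0b101: gaps in the low bits */
		return 0;
	}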
diff --git a/crypto/cipher.c b/crypto/cipher.c
index 54c4a560070d..85eb12f8e564 100644
--- a/crypto/cipher.c
+++ b/crypto/cipher.c
@@ -41,8 +41,10 @@ static unsigned int crypt_slow(const struct cipher_desc *desc,
 			       struct scatter_walk *in,
 			       struct scatter_walk *out, unsigned int bsize)
 {
-	u8 src[bsize];
-	u8 dst[bsize];
+	unsigned int alignmask = desc->tfm->__crt_alg->cra_alignmask;
+	u8 buffer[bsize * 2 + alignmask];
+	u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
+	u8 *dst = src + bsize;
 	unsigned int n;
 
 	n = scatterwalk_copychunks(src, in, bsize, 0);
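A fixed stack array carries no particular alignment guarantee, so crypt_slow() now over-allocates by alignmask bytes and rounds the first pointer up to the next boundary with ALIGN(); dst then stays aligned as well whenever bsize is a multiple of the alignment, which holds for the 16-byte Padlock case. The same arithmetic replayed as a standalone userspace sketch (ALIGN_UP mirrors the kernel's ALIGN macro):

	#include <assert.h>
	#include <stdint.h>

	/* Same rounding as the kernel's ALIGN(): bump past the boundary,
	 * then clear the low bits.  a must be a power of two. */
	#define ALIGN_UP(x, a)	(((x) + ((a) - 1)) & ~((uintptr_t)(a) - 1))

	int main(void)
	{
		unsigned int alignmask = 15;	/* 16-byte requirement */
		unsigned int bsize = 16;
		uint8_t buffer[16 * 2 + 15];	/* bsize * 2 + alignmask */

		/* src: first aligned address inside buffer; dst: one block on. */
		uint8_t *src = (uint8_t *)ALIGN_UP((uintptr_t)buffer, alignmask + 1);
		uint8_t *dst = src + bsize;

		assert(((uintptr_t)src & alignmask) == 0);
		assert(((uintptr_t)dst & alignmask) == 0);
		return 0;
	}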
@@ -59,15 +61,24 @@ static unsigned int crypt_slow(const struct cipher_desc *desc,
 static inline unsigned int crypt_fast(const struct cipher_desc *desc,
 				      struct scatter_walk *in,
 				      struct scatter_walk *out,
-				      unsigned int nbytes)
+				      unsigned int nbytes, u8 *tmp)
 {
 	u8 *src, *dst;
 
 	src = in->data;
 	dst = scatterwalk_samebuf(in, out) ? src : out->data;
 
+	if (tmp) {
+		memcpy(tmp, in->data, nbytes);
+		src = tmp;
+		dst = tmp;
+	}
+
 	nbytes = desc->prfn(desc, dst, src, nbytes);
 
+	if (tmp)
+		memcpy(out->data, tmp, nbytes);
+
 	scatterwalk_advance(in, nbytes);
 	scatterwalk_advance(out, nbytes);
 
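When the caller passes a non-NULL tmp (an aligned scratch area), crypt_fast() bounces the data: copy in, process in place at the aligned address, copy back out, so the low-level routine never sees a misaligned pointer, at the price of two memcpy()s per chunk, the trade-off the changelog mentions. The pattern in isolation, as a userspace sketch with hypothetical names (process_maybe_bounced and xor_byte stand in for crypt_fast and desc->prfn):

	#include <stdint.h>
	#include <string.h>

	typedef unsigned int (*procfn_t)(uint8_t *dst, const uint8_t *src,
					 unsigned int nbytes);

	/* tmp != NULL means the real buffers are misaligned and tmp is an
	 * aligned scratch area: copy in, work in place, copy out. */
	static unsigned int process_maybe_bounced(procfn_t fn, uint8_t *dst,
						  const uint8_t *src,
						  unsigned int nbytes, uint8_t *tmp)
	{
		if (tmp) {
			memcpy(tmp, src, nbytes);	/* bounce in */
			nbytes = fn(tmp, tmp, nbytes);	/* aligned, in place */
			memcpy(dst, tmp, nbytes);	/* bounce out */
			return nbytes;
		}
		return fn(dst, src, nbytes);		/* fast path: zero copies */
	}

	/* Toy stand-in for desc->prfn: XOR every byte with a constant. */
	static unsigned int xor_byte(uint8_t *dst, const uint8_t *src,
				     unsigned int nbytes)
	{
		for (unsigned int i = 0; i < nbytes; i++)
			dst[i] = src[i] ^ 0xaa;
		return nbytes;
	}

	int main(void)
	{
		uint8_t in[8] = "abcdefg", out[8], scratch[8];

		process_maybe_bounced(xor_byte, out, in, 8, scratch);	/* bounced */
		process_maybe_bounced(xor_byte, out, in, 8, NULL);	/* direct */
		return 0;
	}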
@@ -87,6 +98,8 @@ static int crypt(const struct cipher_desc *desc,
 	struct scatter_walk walk_in, walk_out;
 	struct crypto_tfm *tfm = desc->tfm;
 	const unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
+	unsigned int alignmask = tfm->__crt_alg->cra_alignmask;
+	unsigned long buffer = 0;
 
 	if (!nbytes)
 		return 0;
@@ -100,16 +113,27 @@ static int crypt(const struct cipher_desc *desc,
 	scatterwalk_start(&walk_out, dst);
 
 	for(;;) {
-		unsigned int n;
+		unsigned int n = nbytes;
+		u8 *tmp = NULL;
+
+		if (!scatterwalk_aligned(&walk_in, alignmask) ||
+		    !scatterwalk_aligned(&walk_out, alignmask)) {
+			if (!buffer) {
+				buffer = __get_free_page(GFP_ATOMIC);
+				if (!buffer)
+					n = 0;
+			}
+			tmp = (u8 *)buffer;
+		}
 
 		scatterwalk_map(&walk_in, 0);
 		scatterwalk_map(&walk_out, 1);
 
-		n = scatterwalk_clamp(&walk_in, nbytes);
+		n = scatterwalk_clamp(&walk_in, n);
 		n = scatterwalk_clamp(&walk_out, n);
 
 		if (likely(n >= bsize))
-			n = crypt_fast(desc, &walk_in, &walk_out, n);
+			n = crypt_fast(desc, &walk_in, &walk_out, n, tmp);
 		else
 			n = crypt_slow(desc, &walk_in, &walk_out, bsize);
 
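The bounce page is allocated lazily and at most once: the first misaligned chunk pays for __get_free_page(GFP_ATOMIC) (atomic because crypt() may run where sleeping is not allowed), and later misaligned chunks reuse it. The failure handling is worth noting: if the allocation fails, n is forced to 0, the clamps below then yield less than bsize, and the chunk falls through to crypt_slow(), which needs no page since it aligns a small stack buffer. The shape of that logic in a userspace sketch (aligned_alloc standing in for __get_free_page; all names hypothetical):

	#include <stdint.h>
	#include <stdlib.h>

	enum { PAGE_SZ = 4096 };	/* stand-in for PAGE_SIZE */

	/* Sketch of the lazy bounce-page pattern in crypt(). */
	static void walk_chunks(uint8_t *const chunks[], unsigned int nchunks,
				unsigned int alignmask)
	{
		uint8_t *bounce = NULL;

		for (unsigned int i = 0; i < nchunks; i++) {
			uint8_t *tmp = NULL;

			if ((uintptr_t)chunks[i] & alignmask) {
				if (!bounce)	/* first misaligned chunk pays */
					bounce = aligned_alloc(PAGE_SZ, PAGE_SZ);
				tmp = bounce;	/* still NULL on failure: the
						 * caller takes the slow path */
			}
			/* ... process chunks[i], bouncing through tmp if set ... */
			(void)tmp;
		}
		free(bounce);	/* freed once, after the whole walk */
	}

	int main(void)
	{
		uint8_t buf[64];
		uint8_t *chunks[] = { buf, buf + 1, buf + 16 };

		walk_chunks(chunks, 3, 15);	/* 16-byte alignment requirement */
		return 0;
	}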
@@ -119,10 +143,15 @@ static int crypt(const struct cipher_desc *desc,
 		scatterwalk_done(&walk_out, 1, nbytes);
 
 		if (!nbytes)
-			return 0;
+			break;
 
 		crypto_yield(tfm);
 	}
+
+	if (buffer)
+		free_page(buffer);
+
+	return 0;
 }
 
 static unsigned int cbc_process_encrypt(const struct cipher_desc *desc,
diff --git a/crypto/scatterwalk.h b/crypto/scatterwalk.h
index 5495bb970816..e79925c474a3 100644
--- a/crypto/scatterwalk.h
+++ b/crypto/scatterwalk.h
@@ -55,6 +55,12 @@ static inline void scatterwalk_advance(struct scatter_walk *walk,
 	walk->len_this_segment -= nbytes;
 }
 
+static inline unsigned int scatterwalk_aligned(struct scatter_walk *walk,
+					       unsigned int alignmask)
+{
+	return !(walk->offset & alignmask);
+}
+
 void scatterwalk_start(struct scatter_walk *walk, struct scatterlist *sg);
 int scatterwalk_copychunks(void *buf, struct scatter_walk *walk, size_t nbytes, int out);
 void scatterwalk_map(struct scatter_walk *walk, int out);
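Note that scatterwalk_aligned() tests only the offset within the page, not a full virtual address; that is enough because mapped pages start on page boundaries, so for any mask smaller than the page size the low bits of page_base + offset equal the low bits of the offset alone. (This is also why registration rejects masks larger than PAGE_SIZE.) The invariant, checked in a short userspace demo:

	#include <assert.h>
	#include <stdint.h>

	int main(void)
	{
		uintptr_t page_base = 0x12345000;	/* page-aligned by definition */
		unsigned int offset = 0x234;		/* position within the page */
		unsigned int alignmask = 15;

		/* Low bits of the full address == low bits of the offset. */
		assert(((page_base + offset) & alignmask) == (offset & alignmask));
		return 0;
	}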
diff --git a/include/linux/crypto.h b/include/linux/crypto.h
index 26ce01c25745..ac9d49beecd3 100644
--- a/include/linux/crypto.h
+++ b/include/linux/crypto.h
@@ -124,6 +124,7 @@ struct crypto_alg {
 	u32 cra_flags;
 	unsigned int cra_blocksize;
 	unsigned int cra_ctxsize;
+	unsigned int cra_alignmask;
 	const char cra_name[CRYPTO_MAX_ALG_NAME];
 
 	union {
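With the field in place, a caller can pre-check whether its buffers already satisfy a transform's requirement, reading the mask through __crt_alg exactly as the patch itself does (no dedicated accessor is added by this change). A minimal sketch, with a hypothetical helper name:

	/* Hypothetical helper, mirroring the patch's own __crt_alg access:
	 * non-zero if buf needs no bounce copy for this transform. */
	static inline int crypto_buf_aligned(struct crypto_tfm *tfm, const void *buf)
	{
		unsigned int alignmask = tfm->__crt_alg->cra_alignmask;

		return !((unsigned long)buf & alignmask);
	}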