-rw-r--r--    crypto/cipher.c      | 246
-rw-r--r--    crypto/scatterwalk.c |   4
-rw-r--r--    crypto/scatterwalk.h |   6
3 files changed, 161 insertions, 95 deletions
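In short: the cryptfn_t/procfn_t callback pair is replaced by a cipher_desc that carries the tfm, the single-block cipher function (crfn), the mode-processing function (prfn) and the IV (info). The prfn helpers (ecb_process, cbc_process_encrypt/decrypt) now walk as many whole blocks as they are handed and return the number of bytes processed, so crypt() can clamp the walk to the current page, run crypt_fast() over the whole run, and only fall back to crypt_slow()'s on-stack buffers when a block straddles a page boundary. The following is a minimal userspace sketch of that calling convention; the toy_* names and the 4-byte XOR "cipher" are illustrative assumptions, not the kernel's API:

/*
 * Hedged sketch: a desc object carries the single-block function (crfn)
 * and a mode walker (prfn) that consumes whole blocks and reports how
 * many bytes it handled.  Toy code, not the kernel implementation.
 */
#include <stdio.h>

#define BSIZE 4

struct toy_cipher_desc {
        void *ctx;
        void (*crfn)(void *ctx, unsigned char *dst, const unsigned char *src);
        unsigned int (*prfn)(const struct toy_cipher_desc *desc,
                             unsigned char *dst, const unsigned char *src,
                             unsigned int nbytes);
};

/* single-block primitive: XOR with a fixed key byte held in ctx */
static void toy_encrypt_block(void *ctx, unsigned char *dst,
                              const unsigned char *src)
{
        unsigned char key = *(unsigned char *)ctx;

        for (int i = 0; i < BSIZE; i++)
                dst[i] = src[i] ^ key;
}

/* ECB-style walker: process every whole block, return bytes done */
static unsigned int toy_ecb_process(const struct toy_cipher_desc *desc,
                                    unsigned char *dst,
                                    const unsigned char *src,
                                    unsigned int nbytes)
{
        unsigned int done = 0;

        do {
                desc->crfn(desc->ctx, dst, src);
                src += BSIZE;
                dst += BSIZE;
        } while ((done += BSIZE) < nbytes);

        return done;
}

int main(void)
{
        unsigned char key = 0x5a;
        unsigned char in[8] = "ABCDEFG";        /* two 4-byte blocks */
        unsigned char out[8];
        struct toy_cipher_desc desc = {
                .ctx  = &key,
                .crfn = toy_encrypt_block,
                .prfn = toy_ecb_process,
        };
        unsigned int done = desc.prfn(&desc, out, in, sizeof(in));

        printf("processed %u bytes\n", done);
        return 0;
}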
diff --git a/crypto/cipher.c b/crypto/cipher.c
index 69264497b48c..c4243345b154 100644
--- a/crypto/cipher.c
+++ b/crypto/cipher.c
@@ -4,6 +4,7 @@
  * Cipher operations.
  *
  * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
+ * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
  *
  * This program is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License as published by the Free
@@ -22,9 +23,13 @@
 #include "internal.h"
 #include "scatterwalk.h"
 
-typedef void (cryptfn_t)(void *, u8 *, const u8 *);
-typedef void (procfn_t)(struct crypto_tfm *, u8 *,
-			u8*, cryptfn_t, void *);
+struct cipher_desc {
+	struct crypto_tfm *tfm;
+	void (*crfn)(void *ctx, u8 *dst, const u8 *src);
+	unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
+			     const u8 *src, unsigned int nbytes);
+	void *info;
+};
 
 static inline void xor_64(u8 *a, const u8 *b)
 {
@@ -39,63 +44,57 @@ static inline void xor_128(u8 *a, const u8 *b)
 	((u32 *)a)[2] ^= ((u32 *)b)[2];
 	((u32 *)a)[3] ^= ((u32 *)b)[3];
 }
 
-static inline void *prepare_src(struct scatter_walk *walk, int bsize,
-				void *tmp, int in_place)
-{
-	void *src = walk->data;
-	int n = bsize;
-
-	if (unlikely(scatterwalk_across_pages(walk, bsize))) {
-		src = tmp;
-		n = scatterwalk_copychunks(src, walk, bsize, 0);
-	}
-	scatterwalk_advance(walk, n);
-	return src;
-}
-
-static inline void *prepare_dst(struct scatter_walk *walk, int bsize,
-				void *tmp, int in_place)
-{
-	void *dst = walk->data;
-
-	if (unlikely(scatterwalk_across_pages(walk, bsize)) || in_place)
-		dst = tmp;
-	return dst;
-}
-
-static inline void complete_src(struct scatter_walk *walk, int bsize,
-				void *src, int in_place)
-{
-}
-
-static inline void complete_dst(struct scatter_walk *walk, int bsize,
-				void *dst, int in_place)
-{
-	int n = bsize;
-
-	if (unlikely(scatterwalk_across_pages(walk, bsize)))
-		n = scatterwalk_copychunks(dst, walk, bsize, 1);
-	else if (in_place)
-		memcpy(walk->data, dst, bsize);
-	scatterwalk_advance(walk, n);
+static unsigned int crypt_slow(const struct cipher_desc *desc,
+			       struct scatter_walk *in,
+			       struct scatter_walk *out, unsigned int bsize)
+{
+	u8 src[bsize];
+	u8 dst[bsize];
+	unsigned int n;
+
+	n = scatterwalk_copychunks(src, in, bsize, 0);
+	scatterwalk_advance(in, n);
+
+	desc->prfn(desc, dst, src, bsize);
+
+	n = scatterwalk_copychunks(dst, out, bsize, 1);
+	scatterwalk_advance(out, n);
+
+	return bsize;
+}
+
+static inline unsigned int crypt_fast(const struct cipher_desc *desc,
+				      struct scatter_walk *in,
+				      struct scatter_walk *out,
+				      unsigned int nbytes)
+{
+	u8 *src, *dst;
+
+	src = in->data;
+	dst = scatterwalk_samebuf(in, out) ? src : out->data;
+
+	nbytes = desc->prfn(desc, dst, src, nbytes);
+
+	scatterwalk_advance(in, nbytes);
+	scatterwalk_advance(out, nbytes);
+
+	return nbytes;
 }
 
 /*
  * Generic encrypt/decrypt wrapper for ciphers, handles operations across
  * multiple page boundaries by using temporary blocks.  In user context,
- * the kernel is given a chance to schedule us once per block.
+ * the kernel is given a chance to schedule us once per page.
  */
-static int crypt(struct crypto_tfm *tfm,
+static int crypt(const struct cipher_desc *desc,
 		 struct scatterlist *dst,
 		 struct scatterlist *src,
-		 unsigned int nbytes, cryptfn_t crfn,
-		 procfn_t prfn, void *info)
+		 unsigned int nbytes)
 {
 	struct scatter_walk walk_in, walk_out;
+	struct crypto_tfm *tfm = desc->tfm;
 	const unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
-	u8 tmp_src[bsize];
-	u8 tmp_dst[bsize];
 
 	if (!nbytes)
 		return 0;
@@ -109,29 +108,20 @@ static int crypt(struct crypto_tfm *tfm,
 	scatterwalk_start(&walk_out, dst);
 
 	for(;;) {
-		u8 *src_p, *dst_p;
-		int in_place;
+		unsigned int n;
 
 		scatterwalk_map(&walk_in, 0);
 		scatterwalk_map(&walk_out, 1);
 
-		in_place = scatterwalk_samebuf(&walk_in, &walk_out);
+		n = scatterwalk_clamp(&walk_in, nbytes);
+		n = scatterwalk_clamp(&walk_out, n);
 
-		do {
-			src_p = prepare_src(&walk_in, bsize, tmp_src,
-					    in_place);
-			dst_p = prepare_dst(&walk_out, bsize, tmp_dst,
-					    in_place);
-
-			prfn(tfm, dst_p, src_p, crfn, info);
-
-			complete_src(&walk_in, bsize, src_p, in_place);
-			complete_dst(&walk_out, bsize, dst_p, in_place);
-
-			nbytes -= bsize;
-		} while (nbytes &&
-			 !scatterwalk_across_pages(&walk_in, bsize) &&
-			 !scatterwalk_across_pages(&walk_out, bsize));
+		if (likely(n >= bsize))
+			n = crypt_fast(desc, &walk_in, &walk_out, n);
+		else
+			n = crypt_slow(desc, &walk_in, &walk_out, bsize);
+
+		nbytes -= n;
 
 		scatterwalk_done(&walk_in, 0, nbytes);
 		scatterwalk_done(&walk_out, 1, nbytes);
@@ -143,30 +133,78 @@ static int crypt(struct crypto_tfm *tfm,
 	}
 }
 
-static void cbc_process_encrypt(struct crypto_tfm *tfm, u8 *dst, u8 *src,
-				cryptfn_t fn, void *info)
+static unsigned int cbc_process_encrypt(const struct cipher_desc *desc,
+					u8 *dst, const u8 *src,
+					unsigned int nbytes)
 {
-	u8 *iv = info;
+	struct crypto_tfm *tfm = desc->tfm;
+	void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
+	int bsize = crypto_tfm_alg_blocksize(tfm);
+
+	void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
+	u8 *iv = desc->info;
+	unsigned int done = 0;
+
+	do {
+		xor(iv, src);
+		fn(crypto_tfm_ctx(tfm), dst, iv);
+		memcpy(iv, dst, bsize);
 
-	tfm->crt_u.cipher.cit_xor_block(iv, src);
-	fn(crypto_tfm_ctx(tfm), dst, iv);
-	memcpy(iv, dst, crypto_tfm_alg_blocksize(tfm));
+		src += bsize;
+		dst += bsize;
+	} while ((done += bsize) < nbytes);
+
+	return done;
 }
 
-static void cbc_process_decrypt(struct crypto_tfm *tfm, u8 *dst, u8 *src,
-				cryptfn_t fn, void *info)
+static unsigned int cbc_process_decrypt(const struct cipher_desc *desc,
+					u8 *dst, const u8 *src,
+					unsigned int nbytes)
 {
-	u8 *iv = info;
+	struct crypto_tfm *tfm = desc->tfm;
+	void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
+	int bsize = crypto_tfm_alg_blocksize(tfm);
+
+	u8 stack[src == dst ? bsize : 0];
+	u8 *buf = stack;
+	u8 **dst_p = src == dst ? &buf : &dst;
+
+	void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
+	u8 *iv = desc->info;
+	unsigned int done = 0;
+
+	do {
+		u8 *tmp_dst = *dst_p;
 
-	fn(crypto_tfm_ctx(tfm), dst, src);
-	tfm->crt_u.cipher.cit_xor_block(dst, iv);
-	memcpy(iv, src, crypto_tfm_alg_blocksize(tfm));
+		fn(crypto_tfm_ctx(tfm), tmp_dst, src);
+		xor(tmp_dst, iv);
+		memcpy(iv, src, bsize);
+		if (tmp_dst != dst)
+			memcpy(dst, tmp_dst, bsize);
+
+		src += bsize;
+		dst += bsize;
+	} while ((done += bsize) < nbytes);
+
+	return done;
 }
 
-static void ecb_process(struct crypto_tfm *tfm, u8 *dst, u8 *src,
-			cryptfn_t fn, void *info)
+static unsigned int ecb_process(const struct cipher_desc *desc, u8 *dst,
+				const u8 *src, unsigned int nbytes)
 {
-	fn(crypto_tfm_ctx(tfm), dst, src);
+	struct crypto_tfm *tfm = desc->tfm;
+	int bsize = crypto_tfm_alg_blocksize(tfm);
+	void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
+	unsigned int done = 0;
+
+	do {
+		fn(crypto_tfm_ctx(tfm), dst, src);
+
+		src += bsize;
+		dst += bsize;
+	} while ((done += bsize) < nbytes);
+
+	return done;
 }
 
 static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
@@ -185,9 +223,13 @@ static int ecb_encrypt(struct crypto_tfm *tfm,
 		       struct scatterlist *dst,
 		       struct scatterlist *src, unsigned int nbytes)
 {
-	return crypt(tfm, dst, src, nbytes,
-		     tfm->__crt_alg->cra_cipher.cia_encrypt,
-		     ecb_process, NULL);
+	struct cipher_desc desc;
+
+	desc.tfm = tfm;
+	desc.crfn = tfm->__crt_alg->cra_cipher.cia_encrypt;
+	desc.prfn = ecb_process;
+
+	return crypt(&desc, dst, src, nbytes);
 }
 
 static int ecb_decrypt(struct crypto_tfm *tfm,
@@ -195,9 +237,13 @@ static int ecb_decrypt(struct crypto_tfm *tfm,
 			struct scatterlist *src,
 			unsigned int nbytes)
 {
-	return crypt(tfm, dst, src, nbytes,
-		     tfm->__crt_alg->cra_cipher.cia_decrypt,
-		     ecb_process, NULL);
+	struct cipher_desc desc;
+
+	desc.tfm = tfm;
+	desc.crfn = tfm->__crt_alg->cra_cipher.cia_decrypt;
+	desc.prfn = ecb_process;
+
+	return crypt(&desc, dst, src, nbytes);
 }
 
 static int cbc_encrypt(struct crypto_tfm *tfm,
@@ -205,9 +251,14 @@ static int cbc_encrypt(struct crypto_tfm *tfm,
 		       struct scatterlist *src,
 		       unsigned int nbytes)
 {
-	return crypt(tfm, dst, src, nbytes,
-		     tfm->__crt_alg->cra_cipher.cia_encrypt,
-		     cbc_process_encrypt, tfm->crt_cipher.cit_iv);
+	struct cipher_desc desc;
+
+	desc.tfm = tfm;
+	desc.crfn = tfm->__crt_alg->cra_cipher.cia_encrypt;
+	desc.prfn = cbc_process_encrypt;
+	desc.info = tfm->crt_cipher.cit_iv;
+
+	return crypt(&desc, dst, src, nbytes);
 }
 
 static int cbc_encrypt_iv(struct crypto_tfm *tfm,
@@ -215,9 +266,14 @@ static int cbc_encrypt_iv(struct crypto_tfm *tfm,
 			  struct scatterlist *src,
 			  unsigned int nbytes, u8 *iv)
 {
-	return crypt(tfm, dst, src, nbytes,
-		     tfm->__crt_alg->cra_cipher.cia_encrypt,
-		     cbc_process_encrypt, iv);
+	struct cipher_desc desc;
+
+	desc.tfm = tfm;
+	desc.crfn = tfm->__crt_alg->cra_cipher.cia_encrypt;
+	desc.prfn = cbc_process_encrypt;
+	desc.info = iv;
+
+	return crypt(&desc, dst, src, nbytes);
 }
 
 static int cbc_decrypt(struct crypto_tfm *tfm,
@@ -225,9 +281,14 @@ static int cbc_decrypt(struct crypto_tfm *tfm,
 		       struct scatterlist *src,
 		       unsigned int nbytes)
 {
-	return crypt(tfm, dst, src, nbytes,
-		     tfm->__crt_alg->cra_cipher.cia_decrypt,
-		     cbc_process_decrypt, tfm->crt_cipher.cit_iv);
+	struct cipher_desc desc;
+
+	desc.tfm = tfm;
+	desc.crfn = tfm->__crt_alg->cra_cipher.cia_decrypt;
+	desc.prfn = cbc_process_decrypt;
+	desc.info = tfm->crt_cipher.cit_iv;
+
+	return crypt(&desc, dst, src, nbytes);
 }
 
 static int cbc_decrypt_iv(struct crypto_tfm *tfm,
@@ -235,9 +296,14 @@ static int cbc_decrypt_iv(struct crypto_tfm *tfm,
 			  struct scatterlist *src,
 			  unsigned int nbytes, u8 *iv)
 {
-	return crypt(tfm, dst, src, nbytes,
-		     tfm->__crt_alg->cra_cipher.cia_decrypt,
-		     cbc_process_decrypt, iv);
+	struct cipher_desc desc;
+
+	desc.tfm = tfm;
+	desc.crfn = tfm->__crt_alg->cra_cipher.cia_decrypt;
+	desc.prfn = cbc_process_decrypt;
+	desc.info = iv;
+
+	return crypt(&desc, dst, src, nbytes);
 }
 
 static int nocrypt(struct crypto_tfm *tfm,
diff --git a/crypto/scatterwalk.c b/crypto/scatterwalk.c
index 50c9461e8cc6..47ac90e615f4 100644
--- a/crypto/scatterwalk.c
+++ b/crypto/scatterwalk.c
@@ -100,7 +100,7 @@ void scatterwalk_done(struct scatter_walk *walk, int out, int more)
 int scatterwalk_copychunks(void *buf, struct scatter_walk *walk,
 			   size_t nbytes, int out)
 {
-	do {
+	while (nbytes > walk->len_this_page) {
 		memcpy_dir(buf, walk->data, walk->len_this_page, out);
 		buf += walk->len_this_page;
 		nbytes -= walk->len_this_page;
@@ -108,7 +108,7 @@ int scatterwalk_copychunks(void *buf, struct scatter_walk *walk,
 		scatterwalk_unmap(walk, out);
 		scatterwalk_pagedone(walk, out, 1);
 		scatterwalk_map(walk, out);
-	} while (nbytes > walk->len_this_page);
+	}
 
 	memcpy_dir(buf, walk->data, nbytes, out);
 	return nbytes;
diff --git a/crypto/scatterwalk.h b/crypto/scatterwalk.h
index 02aa56c649b4..5495bb970816 100644
--- a/crypto/scatterwalk.h
+++ b/crypto/scatterwalk.h
@@ -40,10 +40,10 @@ static inline int scatterwalk_samebuf(struct scatter_walk *walk_in,
 	       walk_in->offset == walk_out->offset;
 }
 
-static inline int scatterwalk_across_pages(struct scatter_walk *walk,
-					   unsigned int nbytes)
+static inline unsigned int scatterwalk_clamp(struct scatter_walk *walk,
+					     unsigned int nbytes)
 {
-	return nbytes > walk->len_this_page;
+	return nbytes > walk->len_this_page ? walk->len_this_page : nbytes;
 }
 
 static inline void scatterwalk_advance(struct scatter_walk *walk,
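For reference, scatterwalk_across_pages() only answered whether a block would cross the page boundary; scatterwalk_clamp() instead returns how many of the requested bytes fit in the currently mapped page, which is what lets crypt() choose between crypt_fast() and crypt_slow(). A hedged standalone illustration of that clamping (len_this_page mirrors the walker field, everything else is hypothetical):

/*
 * Illustration only: a stripped-down walker with just the field the
 * clamp needs.  The real struct scatter_walk carries more state.
 */
#include <assert.h>

struct toy_walk {
        unsigned int len_this_page;     /* bytes left in the mapped page */
};

static unsigned int toy_clamp(const struct toy_walk *walk, unsigned int nbytes)
{
        return nbytes > walk->len_this_page ? walk->len_this_page : nbytes;
}

int main(void)
{
        struct toy_walk w = { .len_this_page = 13 };

        assert(toy_clamp(&w, 8) == 8);          /* request fits: unchanged */
        assert(toy_clamp(&w, 64) == 13);        /* clamped to the page remainder */
        return 0;
}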