author		Herbert Xu <herbert@gondor.apana.org.au>	2007-01-26 18:05:15 -0500
committer	Herbert Xu <herbert@gondor.apana.org.au>	2007-02-06 17:21:00 -0500
commit		f1ddcaf3393b7a3871809b97fae90fac841a1f39 (patch)
tree		ed73db33ec9160ecafee9b8a12ba369f98fd21e0
parent		ba8da2a9485f22455dcb06dd17e2f6d94b81ba89 (diff)
[CRYPTO] api: Remove deprecated interface
This patch removes the old cipher interface and related code.
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
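
For readers following up on this removal: the sketch below is not part of the patch. The helper name and its parameters are made up for illustration, and it assumes the blkcipher interface of this kernel generation, which supersedes the removed mode flags (CRYPTO_TFM_MODE_*) and crypto_cipher_encrypt*/decrypt* helpers. A caller now selects the chaining mode by algorithm name instead of a tfm flag.

/*
 * Illustrative sketch only -- not part of this patch.  The helper name
 * is hypothetical; the crypto calls are the blkcipher interface that
 * replaces the removed cit_mode/crypto_cipher_* helpers.
 */
#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <linux/err.h>

static int example_cbc_aes_encrypt(const u8 *key, unsigned int keylen,
				   u8 *iv, void *buf, unsigned int len)
{
	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	struct scatterlist sg;
	int err;

	/* The mode is part of the algorithm name, not a tfm flag. */
	tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_blkcipher_setkey(tfm, key, keylen);
	if (err)
		goto out;

	/* IV handling moves to the blkcipher tfm/desc. */
	crypto_blkcipher_set_iv(tfm, iv, crypto_blkcipher_ivsize(tfm));

	desc.tfm = tfm;
	desc.flags = 0;
	sg_init_one(&sg, buf, len);
	err = crypto_blkcipher_encrypt(&desc, &sg, &sg, len);
out:
	crypto_free_blkcipher(tfm);
	return err;
}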
-rw-r--r--	crypto/algapi.c                |   2
-rw-r--r--	crypto/api.c                   |  63
-rw-r--r--	crypto/cipher.c                | 447
-rw-r--r--	crypto/compress.c              |   5
-rw-r--r--	crypto/digest.c                |   5
-rw-r--r--	crypto/internal.h              |  26
-rw-r--r--	fs/ecryptfs/crypto.c           |   4
-rw-r--r--	fs/ecryptfs/ecryptfs_kernel.h  |   1
-rw-r--r--	include/linux/crypto.h         | 127
9 files changed, 17 insertions, 663 deletions
diff --git a/crypto/algapi.c b/crypto/algapi.c
index c91530021e9c..69eb504721a4 100644
--- a/crypto/algapi.c
+++ b/crypto/algapi.c
@@ -396,7 +396,7 @@ struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn)
 		return ERR_PTR(-EAGAIN);
 	}
 
-	tfm = __crypto_alloc_tfm(alg, 0);
+	tfm = __crypto_alloc_tfm(alg);
 	if (IS_ERR(tfm))
 		crypto_mod_put(alg);
 
diff --git a/crypto/api.c b/crypto/api.c
index 8c446871cd5b..8b80baec853a 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -212,25 +212,6 @@ struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
 }
 EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
 
-static int crypto_init_flags(struct crypto_tfm *tfm, u32 flags)
-{
-	tfm->crt_flags = flags & CRYPTO_TFM_REQ_MASK;
-	flags &= ~CRYPTO_TFM_REQ_MASK;
-
-	switch (crypto_tfm_alg_type(tfm)) {
-	case CRYPTO_ALG_TYPE_CIPHER:
-		return crypto_init_cipher_flags(tfm, flags);
-
-	case CRYPTO_ALG_TYPE_DIGEST:
-		return crypto_init_digest_flags(tfm, flags);
-
-	case CRYPTO_ALG_TYPE_COMPRESS:
-		return crypto_init_compress_flags(tfm, flags);
-	}
-
-	return 0;
-}
-
 static int crypto_init_ops(struct crypto_tfm *tfm)
 {
 	const struct crypto_type *type = tfm->__crt_alg->cra_type;
@@ -285,7 +266,7 @@ static void crypto_exit_ops(struct crypto_tfm *tfm)
 	}
 }
 
-static unsigned int crypto_ctxsize(struct crypto_alg *alg, int flags)
+static unsigned int crypto_ctxsize(struct crypto_alg *alg)
 {
 	const struct crypto_type *type = alg->cra_type;
 	unsigned int len;
@@ -299,15 +280,15 @@ static unsigned int crypto_ctxsize(struct crypto_alg *alg, int flags)
 		BUG();
 
 	case CRYPTO_ALG_TYPE_CIPHER:
-		len += crypto_cipher_ctxsize(alg, flags);
+		len += crypto_cipher_ctxsize(alg);
 		break;
 
 	case CRYPTO_ALG_TYPE_DIGEST:
-		len += crypto_digest_ctxsize(alg, flags);
+		len += crypto_digest_ctxsize(alg);
 		break;
 
 	case CRYPTO_ALG_TYPE_COMPRESS:
-		len += crypto_compress_ctxsize(alg, flags);
+		len += crypto_compress_ctxsize(alg);
 		break;
 	}
 
@@ -322,23 +303,19 @@ void crypto_shoot_alg(struct crypto_alg *alg)
 }
 EXPORT_SYMBOL_GPL(crypto_shoot_alg);
 
-struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 flags)
+struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg)
 {
 	struct crypto_tfm *tfm = NULL;
 	unsigned int tfm_size;
 	int err = -ENOMEM;
 
-	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, flags);
+	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg);
 	tfm = kzalloc(tfm_size, GFP_KERNEL);
 	if (tfm == NULL)
 		goto out_err;
 
 	tfm->__crt_alg = alg;
 
-	err = crypto_init_flags(tfm, flags);
-	if (err)
-		goto out_free_tfm;
-
 	err = crypto_init_ops(tfm);
 	if (err)
 		goto out_free_tfm;
@@ -362,31 +339,6 @@ out:
 }
 EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);
 
-struct crypto_tfm *crypto_alloc_tfm(const char *name, u32 flags)
-{
-	struct crypto_tfm *tfm = NULL;
-	int err;
-
-	do {
-		struct crypto_alg *alg;
-
-		alg = crypto_alg_mod_lookup(name, 0, CRYPTO_ALG_ASYNC);
-		err = PTR_ERR(alg);
-		if (IS_ERR(alg))
-			continue;
-
-		tfm = __crypto_alloc_tfm(alg, flags);
-		err = 0;
-		if (IS_ERR(tfm)) {
-			crypto_mod_put(alg);
-			err = PTR_ERR(tfm);
-			tfm = NULL;
-		}
-	} while (err == -EAGAIN && !signal_pending(current));
-
-	return tfm;
-}
-
 /*
  * crypto_alloc_base - Locate algorithm and allocate transform
  * @alg_name: Name of algorithm
@@ -420,7 +372,7 @@ struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
 			goto err;
 		}
 
-		tfm = __crypto_alloc_tfm(alg, 0);
+		tfm = __crypto_alloc_tfm(alg);
 		if (!IS_ERR(tfm))
 			return tfm;
 
@@ -466,7 +418,6 @@ void crypto_free_tfm(struct crypto_tfm *tfm)
 	kfree(tfm);
 }
 
-EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
 EXPORT_SYMBOL_GPL(crypto_free_tfm);
 
 int crypto_has_alg(const char *name, u32 type, u32 mask)
diff --git a/crypto/cipher.c b/crypto/cipher.c
index 9e03701cfdcc..333aab2f0277 100644
--- a/crypto/cipher.c
+++ b/crypto/cipher.c
@@ -12,274 +12,13 @@
  * any later version.
  *
  */
-#include <linux/compiler.h>
+
 #include <linux/kernel.h>
 #include <linux/crypto.h>
 #include <linux/errno.h>
-#include <linux/mm.h>
-#include <linux/slab.h>
+#include <linux/scatterlist.h>
 #include <linux/string.h>
-#include <asm/scatterlist.h>
 #include "internal.h"
-#include "scatterwalk.h"
-
-struct cipher_alg_compat {
-	unsigned int cia_min_keysize;
-	unsigned int cia_max_keysize;
-	int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
-			  unsigned int keylen);
-	void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
-	void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
-
-	unsigned int (*cia_encrypt_ecb)(const struct cipher_desc *desc,
-					u8 *dst, const u8 *src,
-					unsigned int nbytes);
-	unsigned int (*cia_decrypt_ecb)(const struct cipher_desc *desc,
-					u8 *dst, const u8 *src,
-					unsigned int nbytes);
-	unsigned int (*cia_encrypt_cbc)(const struct cipher_desc *desc,
-					u8 *dst, const u8 *src,
-					unsigned int nbytes);
-	unsigned int (*cia_decrypt_cbc)(const struct cipher_desc *desc,
-					u8 *dst, const u8 *src,
-					unsigned int nbytes);
-};
-
-static inline void xor_64(u8 *a, const u8 *b)
-{
-	((u32 *)a)[0] ^= ((u32 *)b)[0];
-	((u32 *)a)[1] ^= ((u32 *)b)[1];
-}
-
-static inline void xor_128(u8 *a, const u8 *b)
-{
-	((u32 *)a)[0] ^= ((u32 *)b)[0];
-	((u32 *)a)[1] ^= ((u32 *)b)[1];
-	((u32 *)a)[2] ^= ((u32 *)b)[2];
-	((u32 *)a)[3] ^= ((u32 *)b)[3];
-}
-
-static unsigned int crypt_slow(const struct cipher_desc *desc,
-			       struct scatter_walk *in,
-			       struct scatter_walk *out, unsigned int bsize)
-{
-	unsigned long alignmask = crypto_tfm_alg_alignmask(desc->tfm);
-	u8 buffer[bsize * 2 + alignmask];
-	u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
-	u8 *dst = src + bsize;
-
-	scatterwalk_copychunks(src, in, bsize, 0);
-	desc->prfn(desc, dst, src, bsize);
-	scatterwalk_copychunks(dst, out, bsize, 1);
-
-	return bsize;
-}
-
-static inline unsigned int crypt_fast(const struct cipher_desc *desc,
-				      struct scatter_walk *in,
-				      struct scatter_walk *out,
-				      unsigned int nbytes, u8 *tmp)
-{
-	u8 *src, *dst;
-	u8 *real_src, *real_dst;
-
-	real_src = scatterwalk_map(in, 0);
-	real_dst = scatterwalk_map(out, 1);
-
-	src = real_src;
-	dst = scatterwalk_samebuf(in, out) ? src : real_dst;
-
-	if (tmp) {
-		memcpy(tmp, src, nbytes);
-		src = tmp;
-		dst = tmp;
-	}
-
-	nbytes = desc->prfn(desc, dst, src, nbytes);
-
-	if (tmp)
-		memcpy(real_dst, tmp, nbytes);
-
-	scatterwalk_unmap(real_src, 0);
-	scatterwalk_unmap(real_dst, 1);
-
-	scatterwalk_advance(in, nbytes);
-	scatterwalk_advance(out, nbytes);
-
-	return nbytes;
-}
-
-/*
- * Generic encrypt/decrypt wrapper for ciphers, handles operations across
- * multiple page boundaries by using temporary blocks.  In user context,
- * the kernel is given a chance to schedule us once per page.
- */
-static int crypt(const struct cipher_desc *desc,
-		 struct scatterlist *dst,
-		 struct scatterlist *src,
-		 unsigned int nbytes)
-{
-	struct scatter_walk walk_in, walk_out;
-	struct crypto_tfm *tfm = desc->tfm;
-	const unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
-	unsigned int alignmask = crypto_tfm_alg_alignmask(tfm);
-	unsigned long buffer = 0;
-
-	if (!nbytes)
-		return 0;
-
-	if (nbytes % bsize) {
-		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
-		return -EINVAL;
-	}
-
-	scatterwalk_start(&walk_in, src);
-	scatterwalk_start(&walk_out, dst);
-
-	for(;;) {
-		unsigned int n = nbytes;
-		u8 *tmp = NULL;
-
-		if (!scatterwalk_aligned(&walk_in, alignmask) ||
-		    !scatterwalk_aligned(&walk_out, alignmask)) {
-			if (!buffer) {
-				buffer = __get_free_page(GFP_ATOMIC);
-				if (!buffer)
-					n = 0;
-			}
-			tmp = (u8 *)buffer;
-		}
-
-		n = scatterwalk_clamp(&walk_in, n);
-		n = scatterwalk_clamp(&walk_out, n);
-
-		if (likely(n >= bsize))
-			n = crypt_fast(desc, &walk_in, &walk_out, n, tmp);
-		else
-			n = crypt_slow(desc, &walk_in, &walk_out, bsize);
-
-		nbytes -= n;
-
-		scatterwalk_done(&walk_in, 0, nbytes);
-		scatterwalk_done(&walk_out, 1, nbytes);
-
-		if (!nbytes)
-			break;
-
-		crypto_yield(tfm->crt_flags);
-	}
-
-	if (buffer)
-		free_page(buffer);
-
-	return 0;
-}
-
-static int crypt_iv_unaligned(struct cipher_desc *desc,
-			      struct scatterlist *dst,
-			      struct scatterlist *src,
-			      unsigned int nbytes)
-{
-	struct crypto_tfm *tfm = desc->tfm;
-	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
-	u8 *iv = desc->info;
-
-	if (unlikely(((unsigned long)iv & alignmask))) {
-		unsigned int ivsize = tfm->crt_cipher.cit_ivsize;
-		u8 buffer[ivsize + alignmask];
-		u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
-		int err;
-
-		desc->info = memcpy(tmp, iv, ivsize);
-		err = crypt(desc, dst, src, nbytes);
-		memcpy(iv, tmp, ivsize);
-
-		return err;
-	}
-
-	return crypt(desc, dst, src, nbytes);
-}
-
-static unsigned int cbc_process_encrypt(const struct cipher_desc *desc,
-					u8 *dst, const u8 *src,
-					unsigned int nbytes)
-{
-	struct crypto_tfm *tfm = desc->tfm;
-	void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
-	int bsize = crypto_tfm_alg_blocksize(tfm);
-
-	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn;
-	u8 *iv = desc->info;
-	unsigned int done = 0;
-
-	nbytes -= bsize;
-
-	do {
-		xor(iv, src);
-		fn(tfm, dst, iv);
-		memcpy(iv, dst, bsize);
-
-		src += bsize;
-		dst += bsize;
-	} while ((done += bsize) <= nbytes);
-
-	return done;
-}
-
-static unsigned int cbc_process_decrypt(const struct cipher_desc *desc,
-					u8 *dst, const u8 *src,
-					unsigned int nbytes)
-{
-	struct crypto_tfm *tfm = desc->tfm;
-	void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
-	int bsize = crypto_tfm_alg_blocksize(tfm);
-	unsigned long alignmask = crypto_tfm_alg_alignmask(desc->tfm);
-
-	u8 stack[src == dst ? bsize + alignmask : 0];
-	u8 *buf = (u8 *)ALIGN((unsigned long)stack, alignmask + 1);
-	u8 **dst_p = src == dst ? &buf : &dst;
-
-	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn;
-	u8 *iv = desc->info;
-	unsigned int done = 0;
-
-	nbytes -= bsize;
-
-	do {
-		u8 *tmp_dst = *dst_p;
-
-		fn(tfm, tmp_dst, src);
-		xor(tmp_dst, iv);
-		memcpy(iv, src, bsize);
-		if (tmp_dst != dst)
-			memcpy(dst, tmp_dst, bsize);
-
-		src += bsize;
-		dst += bsize;
-	} while ((done += bsize) <= nbytes);
-
-	return done;
-}
-
-static unsigned int ecb_process(const struct cipher_desc *desc, u8 *dst,
-				const u8 *src, unsigned int nbytes)
-{
-	struct crypto_tfm *tfm = desc->tfm;
-	int bsize = crypto_tfm_alg_blocksize(tfm);
-	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = desc->crfn;
-	unsigned int done = 0;
-
-	nbytes -= bsize;
-
-	do {
-		fn(tfm, dst, src);
-
-		src += bsize;
-		dst += bsize;
-	} while ((done += bsize) <= nbytes);
-
-	return done;
-}
 
 static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
 {
@@ -293,122 +32,6 @@ static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
 	return cia->cia_setkey(tfm, key, keylen);
 }
 
-static int ecb_encrypt(struct crypto_tfm *tfm,
-		       struct scatterlist *dst,
-		       struct scatterlist *src, unsigned int nbytes)
-{
-	struct cipher_desc desc;
-	struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;
-
-	desc.tfm = tfm;
-	desc.crfn = cipher->cia_encrypt;
-	desc.prfn = cipher->cia_encrypt_ecb ?: ecb_process;
-
-	return crypt(&desc, dst, src, nbytes);
-}
-
-static int ecb_decrypt(struct crypto_tfm *tfm,
-		       struct scatterlist *dst,
-		       struct scatterlist *src,
-		       unsigned int nbytes)
-{
-	struct cipher_desc desc;
-	struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;
-
-	desc.tfm = tfm;
-	desc.crfn = cipher->cia_decrypt;
-	desc.prfn = cipher->cia_decrypt_ecb ?: ecb_process;
-
-	return crypt(&desc, dst, src, nbytes);
-}
-
-static int cbc_encrypt(struct crypto_tfm *tfm,
-		       struct scatterlist *dst,
-		       struct scatterlist *src,
-		       unsigned int nbytes)
-{
-	struct cipher_desc desc;
-	struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;
-
-	desc.tfm = tfm;
-	desc.crfn = cipher->cia_encrypt;
-	desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
-	desc.info = tfm->crt_cipher.cit_iv;
-
-	return crypt(&desc, dst, src, nbytes);
-}
-
-static int cbc_encrypt_iv(struct crypto_tfm *tfm,
-			  struct scatterlist *dst,
-			  struct scatterlist *src,
-			  unsigned int nbytes, u8 *iv)
-{
-	struct cipher_desc desc;
-	struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;
-
-	desc.tfm = tfm;
-	desc.crfn = cipher->cia_encrypt;
-	desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
-	desc.info = iv;
-
-	return crypt_iv_unaligned(&desc, dst, src, nbytes);
-}
-
-static int cbc_decrypt(struct crypto_tfm *tfm,
-		       struct scatterlist *dst,
-		       struct scatterlist *src,
-		       unsigned int nbytes)
-{
-	struct cipher_desc desc;
-	struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;
-
-	desc.tfm = tfm;
-	desc.crfn = cipher->cia_decrypt;
-	desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
-	desc.info = tfm->crt_cipher.cit_iv;
-
-	return crypt(&desc, dst, src, nbytes);
-}
-
-static int cbc_decrypt_iv(struct crypto_tfm *tfm,
-			  struct scatterlist *dst,
-			  struct scatterlist *src,
-			  unsigned int nbytes, u8 *iv)
-{
-	struct cipher_desc desc;
-	struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;
-
-	desc.tfm = tfm;
-	desc.crfn = cipher->cia_decrypt;
-	desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
-	desc.info = iv;
-
-	return crypt_iv_unaligned(&desc, dst, src, nbytes);
-}
-
-static int nocrypt(struct crypto_tfm *tfm,
-		   struct scatterlist *dst,
-		   struct scatterlist *src,
-		   unsigned int nbytes)
-{
-	return -ENOSYS;
-}
-
-static int nocrypt_iv(struct crypto_tfm *tfm,
-		      struct scatterlist *dst,
-		      struct scatterlist *src,
-		      unsigned int nbytes, u8 *iv)
-{
-	return -ENOSYS;
-}
-
-int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags)
-{
-	u32 mode = flags & CRYPTO_TFM_MODE_MASK;
-	tfm->crt_cipher.cit_mode = mode ? mode : CRYPTO_TFM_MODE_ECB;
-	return 0;
-}
-
 static void cipher_crypt_unaligned(void (*fn)(struct crypto_tfm *, u8 *,
 					      const u8 *),
 				   struct crypto_tfm *tfm,
@@ -454,7 +77,6 @@ static void cipher_decrypt_unaligned(struct crypto_tfm *tfm,
 
 int crypto_init_cipher_ops(struct crypto_tfm *tfm)
 {
-	int ret = 0;
 	struct cipher_tfm *ops = &tfm->crt_cipher;
 	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
 
@@ -464,70 +86,7 @@ int crypto_init_cipher_ops(struct crypto_tfm *tfm)
 	ops->cit_decrypt_one = crypto_tfm_alg_alignmask(tfm) ?
 				cipher_decrypt_unaligned : cipher->cia_decrypt;
 
-	switch (tfm->crt_cipher.cit_mode) {
-	case CRYPTO_TFM_MODE_ECB:
-		ops->cit_encrypt = ecb_encrypt;
-		ops->cit_decrypt = ecb_decrypt;
-		ops->cit_encrypt_iv = nocrypt_iv;
-		ops->cit_decrypt_iv = nocrypt_iv;
-		break;
-
-	case CRYPTO_TFM_MODE_CBC:
-		ops->cit_encrypt = cbc_encrypt;
-		ops->cit_decrypt = cbc_decrypt;
-		ops->cit_encrypt_iv = cbc_encrypt_iv;
-		ops->cit_decrypt_iv = cbc_decrypt_iv;
-		break;
-
-	case CRYPTO_TFM_MODE_CFB:
-		ops->cit_encrypt = nocrypt;
-		ops->cit_decrypt = nocrypt;
-		ops->cit_encrypt_iv = nocrypt_iv;
-		ops->cit_decrypt_iv = nocrypt_iv;
-		break;
-
-	case CRYPTO_TFM_MODE_CTR:
-		ops->cit_encrypt = nocrypt;
-		ops->cit_decrypt = nocrypt;
-		ops->cit_encrypt_iv = nocrypt_iv;
-		ops->cit_decrypt_iv = nocrypt_iv;
-		break;
-
-	default:
-		BUG();
-	}
-
-	if (ops->cit_mode == CRYPTO_TFM_MODE_CBC) {
-		unsigned long align;
-		unsigned long addr;
-
-		switch (crypto_tfm_alg_blocksize(tfm)) {
-		case 8:
-			ops->cit_xor_block = xor_64;
-			break;
-
-		case 16:
-			ops->cit_xor_block = xor_128;
-			break;
-
-		default:
-			printk(KERN_WARNING "%s: block size %u not supported\n",
-			       crypto_tfm_alg_name(tfm),
-			       crypto_tfm_alg_blocksize(tfm));
-			ret = -EINVAL;
-			goto out;
-		}
-
-		ops->cit_ivsize = crypto_tfm_alg_blocksize(tfm);
-		align = crypto_tfm_alg_alignmask(tfm) + 1;
-		addr = (unsigned long)crypto_tfm_ctx(tfm);
-		addr = ALIGN(addr, align);
-		addr += ALIGN(tfm->__crt_alg->cra_ctxsize, align);
-		ops->cit_iv = (void *)addr;
-	}
-
-out:
-	return ret;
+	return 0;
 }
 
 void crypto_exit_cipher_ops(struct crypto_tfm *tfm)
diff --git a/crypto/compress.c b/crypto/compress.c
index eca182aa3380..0a6570048c1e 100644
--- a/crypto/compress.c
+++ b/crypto/compress.c
@@ -34,11 +34,6 @@ static int crypto_decompress(struct crypto_tfm *tfm,
 	                                                   dlen);
 }
 
-int crypto_init_compress_flags(struct crypto_tfm *tfm, u32 flags)
-{
-	return flags ? -EINVAL : 0;
-}
-
 int crypto_init_compress_ops(struct crypto_tfm *tfm)
 {
 	struct compress_tfm *ops = &tfm->crt_compress;
diff --git a/crypto/digest.c b/crypto/digest.c
index bc47af648cb1..1bf7414aeb9e 100644
--- a/crypto/digest.c
+++ b/crypto/digest.c
@@ -136,11 +136,6 @@ static int digest(struct hash_desc *desc,
 	return final(desc, out);
 }
 
-int crypto_init_digest_flags(struct crypto_tfm *tfm, u32 flags)
-{
-	return flags ? -EINVAL : 0;
-}
-
 int crypto_init_digest_ops(struct crypto_tfm *tfm)
 {
 	struct hash_tfm *ops = &tfm->crt_hash;
diff --git a/crypto/internal.h b/crypto/internal.h
index 2da6ad4f3593..784a7745315f 100644
--- a/crypto/internal.h
+++ b/crypto/internal.h
@@ -83,8 +83,7 @@ static inline void crypto_exit_proc(void)
 { }
 #endif
 
-static inline unsigned int crypto_digest_ctxsize(struct crypto_alg *alg,
-						 int flags)
+static inline unsigned int crypto_digest_ctxsize(struct crypto_alg *alg)
 {
 	unsigned int len = alg->cra_ctxsize;
 
@@ -96,23 +95,12 @@ static inline unsigned int crypto_digest_ctxsize(struct crypto_alg *alg,
 	return len;
 }
 
-static inline unsigned int crypto_cipher_ctxsize(struct crypto_alg *alg,
-						 int flags)
+static inline unsigned int crypto_cipher_ctxsize(struct crypto_alg *alg)
 {
-	unsigned int len = alg->cra_ctxsize;
-
-	switch (flags & CRYPTO_TFM_MODE_MASK) {
-	case CRYPTO_TFM_MODE_CBC:
-		len = ALIGN(len, (unsigned long)alg->cra_alignmask + 1);
-		len += alg->cra_blocksize;
-		break;
-	}
-
-	return len;
+	return alg->cra_ctxsize;
 }
 
-static inline unsigned int crypto_compress_ctxsize(struct crypto_alg *alg,
-						   int flags)
+static inline unsigned int crypto_compress_ctxsize(struct crypto_alg *alg)
 {
 	return alg->cra_ctxsize;
 }
@@ -121,10 +109,6 @@ struct crypto_alg *crypto_mod_get(struct crypto_alg *alg);
 struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, u32 mask);
 struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask);
 
-int crypto_init_digest_flags(struct crypto_tfm *tfm, u32 flags);
-int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags);
-int crypto_init_compress_flags(struct crypto_tfm *tfm, u32 flags);
-
 int crypto_init_digest_ops(struct crypto_tfm *tfm);
 int crypto_init_cipher_ops(struct crypto_tfm *tfm);
 int crypto_init_compress_ops(struct crypto_tfm *tfm);
@@ -136,7 +120,7 @@ void crypto_exit_compress_ops(struct crypto_tfm *tfm);
 void crypto_larval_error(const char *name, u32 type, u32 mask);
 
 void crypto_shoot_alg(struct crypto_alg *alg);
-struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 flags);
+struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg);
 
 int crypto_register_instance(struct crypto_template *tmpl,
 			     struct crypto_instance *inst);
diff --git a/fs/ecryptfs/crypto.c b/fs/ecryptfs/crypto.c
index 7196f50fe152..a86a55ccf874 100644
--- a/fs/ecryptfs/crypto.c
+++ b/fs/ecryptfs/crypto.c
@@ -828,9 +828,7 @@ int ecryptfs_init_crypt_ctx(struct ecryptfs_crypt_stat *crypt_stat)
 		mutex_unlock(&crypt_stat->cs_tfm_mutex);
 		goto out;
 	}
-	crypto_blkcipher_set_flags(crypt_stat->tfm,
-				   (ECRYPTFS_DEFAULT_CHAINING_MODE
-				    | CRYPTO_TFM_REQ_WEAK_KEY));
+	crypto_blkcipher_set_flags(crypt_stat->tfm, CRYPTO_TFM_REQ_WEAK_KEY);
 	mutex_unlock(&crypt_stat->cs_tfm_mutex);
 	rc = 0;
 out:
diff --git a/fs/ecryptfs/ecryptfs_kernel.h b/fs/ecryptfs/ecryptfs_kernel.h
index afb64bdbe6ad..0f897109759b 100644
--- a/fs/ecryptfs/ecryptfs_kernel.h
+++ b/fs/ecryptfs/ecryptfs_kernel.h
@@ -176,7 +176,6 @@ ecryptfs_get_key_payload_data(struct key *key)
 #define ECRYPTFS_FILE_SIZE_BYTES 8
 #define ECRYPTFS_DEFAULT_CIPHER "aes"
 #define ECRYPTFS_DEFAULT_KEY_BYTES 16
-#define ECRYPTFS_DEFAULT_CHAINING_MODE CRYPTO_TFM_MODE_CBC
 #define ECRYPTFS_DEFAULT_HASH "md5"
 #define ECRYPTFS_TAG_3_PACKET_TYPE 0x8C
 #define ECRYPTFS_TAG_11_PACKET_TYPE 0xED
diff --git a/include/linux/crypto.h b/include/linux/crypto.h
index 4aa9046601da..95936a5e7c12 100644
--- a/include/linux/crypto.h
+++ b/include/linux/crypto.h
@@ -51,15 +51,9 @@
 /*
  * Transform masks and values (for crt_flags).
  */
-#define CRYPTO_TFM_MODE_MASK		0x000000ff
 #define CRYPTO_TFM_REQ_MASK		0x000fff00
 #define CRYPTO_TFM_RES_MASK		0xfff00000
 
-#define CRYPTO_TFM_MODE_ECB		0x00000001
-#define CRYPTO_TFM_MODE_CBC		0x00000002
-#define CRYPTO_TFM_MODE_CFB		0x00000004
-#define CRYPTO_TFM_MODE_CTR		0x00000008
-
 #define CRYPTO_TFM_REQ_WEAK_KEY		0x00000100
 #define CRYPTO_TFM_REQ_MAY_SLEEP	0x00000200
 #define CRYPTO_TFM_RES_WEAK_KEY		0x00100000
@@ -71,12 +65,8 @@
 /*
  * Miscellaneous stuff.
  */
-#define CRYPTO_UNSPEC			0
 #define CRYPTO_MAX_ALG_NAME		64
 
-#define CRYPTO_DIR_ENCRYPT		1
-#define CRYPTO_DIR_DECRYPT		0
-
 /*
  * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
  * declaration) is used to ensure that the crypto_tfm context structure is
@@ -148,19 +138,6 @@ struct cipher_alg {
 			  unsigned int keylen);
 	void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
 	void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
-
-	unsigned int (*cia_encrypt_ecb)(const struct cipher_desc *desc,
-					u8 *dst, const u8 *src,
-					unsigned int nbytes) __deprecated;
-	unsigned int (*cia_decrypt_ecb)(const struct cipher_desc *desc,
-					u8 *dst, const u8 *src,
-					unsigned int nbytes) __deprecated;
-	unsigned int (*cia_encrypt_cbc)(const struct cipher_desc *desc,
-					u8 *dst, const u8 *src,
-					unsigned int nbytes) __deprecated;
-	unsigned int (*cia_decrypt_cbc)(const struct cipher_desc *desc,
-					u8 *dst, const u8 *src,
-					unsigned int nbytes) __deprecated;
 };
 
 struct digest_alg {
@@ -243,11 +220,6 @@ int crypto_unregister_alg(struct crypto_alg *alg);
 #ifdef CONFIG_CRYPTO
 int crypto_has_alg(const char *name, u32 type, u32 mask);
 #else
-static inline int crypto_alg_available(const char *name, u32 flags)
-{
-	return 0;
-}
-
 static inline int crypto_has_alg(const char *name, u32 type, u32 mask)
 {
 	return 0;
@@ -395,40 +367,11 @@ static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
 	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
 }
 
-static unsigned int crypto_tfm_alg_min_keysize(struct crypto_tfm *tfm)
-	__deprecated;
-static inline unsigned int crypto_tfm_alg_min_keysize(struct crypto_tfm *tfm)
-{
-	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
-	return tfm->__crt_alg->cra_cipher.cia_min_keysize;
-}
-
-static unsigned int crypto_tfm_alg_max_keysize(struct crypto_tfm *tfm)
-	__deprecated;
-static inline unsigned int crypto_tfm_alg_max_keysize(struct crypto_tfm *tfm)
-{
-	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
-	return tfm->__crt_alg->cra_cipher.cia_max_keysize;
-}
-
-static unsigned int crypto_tfm_alg_ivsize(struct crypto_tfm *tfm) __deprecated;
-static inline unsigned int crypto_tfm_alg_ivsize(struct crypto_tfm *tfm)
-{
-	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
-	return tfm->crt_cipher.cit_ivsize;
-}
-
 static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
 {
 	return tfm->__crt_alg->cra_blocksize;
 }
 
-static inline unsigned int crypto_tfm_alg_digestsize(struct crypto_tfm *tfm)
-{
-	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
-	return tfm->__crt_alg->cra_digest.dia_digestsize;
-}
-
 static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
 {
 	return tfm->__crt_alg->cra_alignmask;
@@ -809,76 +752,6 @@ static inline int crypto_hash_setkey(struct crypto_hash *hash,
 	return crypto_hash_crt(hash)->setkey(hash, key, keylen);
 }
 
-static int crypto_cipher_encrypt(struct crypto_tfm *tfm,
-				 struct scatterlist *dst,
-				 struct scatterlist *src,
-				 unsigned int nbytes) __deprecated;
-static inline int crypto_cipher_encrypt(struct crypto_tfm *tfm,
-					struct scatterlist *dst,
-					struct scatterlist *src,
-					unsigned int nbytes)
-{
-	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
-	return tfm->crt_cipher.cit_encrypt(tfm, dst, src, nbytes);
-}
-
-static int crypto_cipher_encrypt_iv(struct crypto_tfm *tfm,
-				    struct scatterlist *dst,
-				    struct scatterlist *src,
-				    unsigned int nbytes, u8 *iv) __deprecated;
-static inline int crypto_cipher_encrypt_iv(struct crypto_tfm *tfm,
-					   struct scatterlist *dst,
-					   struct scatterlist *src,
-					   unsigned int nbytes, u8 *iv)
-{
-	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
-	return tfm->crt_cipher.cit_encrypt_iv(tfm, dst, src, nbytes, iv);
-}
-
-static int crypto_cipher_decrypt(struct crypto_tfm *tfm,
-				 struct scatterlist *dst,
-				 struct scatterlist *src,
-				 unsigned int nbytes) __deprecated;
-static inline int crypto_cipher_decrypt(struct crypto_tfm *tfm,
-					struct scatterlist *dst,
-					struct scatterlist *src,
-					unsigned int nbytes)
-{
-	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
-	return tfm->crt_cipher.cit_decrypt(tfm, dst, src, nbytes);
-}
-
-static int crypto_cipher_decrypt_iv(struct crypto_tfm *tfm,
-				    struct scatterlist *dst,
-				    struct scatterlist *src,
-				    unsigned int nbytes, u8 *iv) __deprecated;
-static inline int crypto_cipher_decrypt_iv(struct crypto_tfm *tfm,
-					   struct scatterlist *dst,
-					   struct scatterlist *src,
-					   unsigned int nbytes, u8 *iv)
-{
-	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
-	return tfm->crt_cipher.cit_decrypt_iv(tfm, dst, src, nbytes, iv);
-}
-
-static void crypto_cipher_set_iv(struct crypto_tfm *tfm,
-				 const u8 *src, unsigned int len) __deprecated;
-static inline void crypto_cipher_set_iv(struct crypto_tfm *tfm,
-					const u8 *src, unsigned int len)
-{
-	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
-	memcpy(tfm->crt_cipher.cit_iv, src, len);
-}
-
-static void crypto_cipher_get_iv(struct crypto_tfm *tfm,
-				 u8 *dst, unsigned int len) __deprecated;
-static inline void crypto_cipher_get_iv(struct crypto_tfm *tfm,
-					u8 *dst, unsigned int len)
-{
-	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
-	memcpy(dst, tfm->crt_cipher.cit_iv, len);
-}
-
 static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
 {
 	return (struct crypto_comp *)tfm;