Diffstat (limited to 'crypto/cipher.c')

 crypto/cipher.c | 312
 1 file changed, 216 insertions(+), 96 deletions(-)

diff --git a/crypto/cipher.c b/crypto/cipher.c
index f434ce7c2d0b..1c92c6bb138b 100644
--- a/crypto/cipher.c
+++ b/crypto/cipher.c
@@ -4,6 +4,7 @@
  * Cipher operations.
  *
  * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
+ * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
  *
  * This program is free software; you can redistribute it and/or modify it
  * under the terms of the GNU General Public License as published by the Free
@@ -22,10 +23,6 @@
 #include "internal.h"
 #include "scatterwalk.h"
 
-typedef void (cryptfn_t)(void *, u8 *, const u8 *);
-typedef void (procfn_t)(struct crypto_tfm *, u8 *,
-			u8*, cryptfn_t, void *);
-
 static inline void xor_64(u8 *a, const u8 *b)
 {
 	((u32 *)a)[0] ^= ((u32 *)b)[0];
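The two typedefs removed above threaded the block function, the mode callback and the IV through crypt() as loose arguments; the rest of this patch replaces them with a struct cipher_desc bundling tfm, crfn, prfn and info, so a mode callback can process many blocks per call. Below is a minimal userspace sketch of that bundling with an invented toy_desc and a XOR stand-in cipher; nothing here is the kernel API.

```c
#include <stdio.h>
#include <stdint.h>

enum { BSIZE = 8 };	/* pretend block size */

/* Toy stand-in for struct cipher_desc: bundles the cipher context,
 * the single-block function and the multi-block walker together. */
struct toy_desc {
	void *ctx;	/* cipher context (here: just the key) */
	void (*crfn)(void *ctx, uint8_t *dst, const uint8_t *src);
	unsigned int (*prfn)(const struct toy_desc *desc, uint8_t *dst,
			     const uint8_t *src, unsigned int nbytes);
};

/* Single-block "cipher": XOR with an 8-byte key. */
static void xor_block(void *ctx, uint8_t *dst, const uint8_t *src)
{
	const uint8_t *key = ctx;
	for (int i = 0; i < BSIZE; i++)
		dst[i] = src[i] ^ key[i];
}

/* Generic multi-block walker, analogous to ecb_process() below. */
static unsigned int ecb_walk(const struct toy_desc *desc, uint8_t *dst,
			     const uint8_t *src, unsigned int nbytes)
{
	unsigned int done = 0;

	do {
		desc->crfn(desc->ctx, dst, src);
		src += BSIZE;
		dst += BSIZE;
	} while ((done += BSIZE) < nbytes);

	return done;
}

int main(void)
{
	uint8_t key[BSIZE] = "secretk";
	uint8_t buf[16] = "hello, world!!!";
	struct toy_desc desc = { .ctx = key, .crfn = xor_block, .prfn = ecb_walk };

	desc.prfn(&desc, buf, buf, sizeof(buf));	/* "encrypt" in place */
	desc.prfn(&desc, buf, buf, sizeof(buf));	/* XOR twice = restore */
	printf("%s\n", buf);
	return 0;
}
```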
@@ -39,63 +36,70 @@ static inline void xor_128(u8 *a, const u8 *b)
 	((u32 *)a)[2] ^= ((u32 *)b)[2];
 	((u32 *)a)[3] ^= ((u32 *)b)[3];
 }
 
-static inline void *prepare_src(struct scatter_walk *walk, int bsize,
-				void *tmp, int in_place)
+static unsigned int crypt_slow(const struct cipher_desc *desc,
+			       struct scatter_walk *in,
+			       struct scatter_walk *out, unsigned int bsize)
 {
-	void *src = walk->data;
-	int n = bsize;
+	unsigned int alignmask = crypto_tfm_alg_alignmask(desc->tfm);
+	u8 buffer[bsize * 2 + alignmask];
+	u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
+	u8 *dst = src + bsize;
+	unsigned int n;
 
-	if (unlikely(scatterwalk_across_pages(walk, bsize))) {
-		src = tmp;
-		n = scatterwalk_copychunks(src, walk, bsize, 0);
-	}
-	scatterwalk_advance(walk, n);
-	return src;
+	n = scatterwalk_copychunks(src, in, bsize, 0);
+	scatterwalk_advance(in, n);
+
+	desc->prfn(desc, dst, src, bsize);
+
+	n = scatterwalk_copychunks(dst, out, bsize, 1);
+	scatterwalk_advance(out, n);
+
+	return bsize;
 }
 
-static inline void *prepare_dst(struct scatter_walk *walk, int bsize,
-				void *tmp, int in_place)
+static inline unsigned int crypt_fast(const struct cipher_desc *desc,
+				      struct scatter_walk *in,
+				      struct scatter_walk *out,
+				      unsigned int nbytes, u8 *tmp)
 {
-	void *dst = walk->data;
+	u8 *src, *dst;
 
-	if (unlikely(scatterwalk_across_pages(walk, bsize)) || in_place)
+	src = in->data;
+	dst = scatterwalk_samebuf(in, out) ? src : out->data;
+
+	if (tmp) {
+		memcpy(tmp, in->data, nbytes);
+		src = tmp;
 		dst = tmp;
-	return dst;
-}
+	}
 
-static inline void complete_src(struct scatter_walk *walk, int bsize,
-				void *src, int in_place)
-{
-}
+	nbytes = desc->prfn(desc, dst, src, nbytes);
 
-static inline void complete_dst(struct scatter_walk *walk, int bsize,
-				void *dst, int in_place)
-{
-	int n = bsize;
+	if (tmp)
+		memcpy(out->data, tmp, nbytes);
+
+	scatterwalk_advance(in, nbytes);
+	scatterwalk_advance(out, nbytes);
 
-	if (unlikely(scatterwalk_across_pages(walk, bsize)))
-		n = scatterwalk_copychunks(dst, walk, bsize, 1);
-	else if (in_place)
-		memcpy(walk->data, dst, bsize);
-	scatterwalk_advance(walk, n);
+	return nbytes;
 }
 
 /*
  * Generic encrypt/decrypt wrapper for ciphers, handles operations across
  * multiple page boundaries by using temporary blocks.  In user context,
- * the kernel is given a chance to schedule us once per block.
+ * the kernel is given a chance to schedule us once per page.
  */
-static int crypt(struct crypto_tfm *tfm,
+static int crypt(const struct cipher_desc *desc,
 		 struct scatterlist *dst,
 		 struct scatterlist *src,
-		 unsigned int nbytes, cryptfn_t crfn,
-		 procfn_t prfn, void *info)
+		 unsigned int nbytes)
 {
 	struct scatter_walk walk_in, walk_out;
+	struct crypto_tfm *tfm = desc->tfm;
 	const unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
-	u8 tmp_src[bsize];
-	u8 tmp_dst[bsize];
+	unsigned int alignmask = crypto_tfm_alg_alignmask(tfm);
+	unsigned long buffer = 0;
 
 	if (!nbytes)
 		return 0;
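crypt_slow() above (and crypt_iv_unaligned() further down) get an aligned scratch area by over-allocating a stack buffer by alignmask bytes and rounding the pointer up with ALIGN(). A hedged userspace illustration of just that arithmetic, using the same round-up definition the kernel macro has:

```c
#include <stdio.h>
#include <stdint.h>

/* Same shape as the kernel's ALIGN(): round x up to a multiple of a,
 * where a is a power of two. */
#define ALIGN(x, a)	((((uintptr_t)(x)) + ((a) - 1)) & ~((uintptr_t)(a) - 1))

int main(void)
{
	unsigned int alignmask = 15;	/* a cipher asking for 16-byte alignment */
	unsigned int bsize = 16;

	/* Over-allocate by alignmask bytes so an aligned src/dst pair of
	 * bsize bytes each is guaranteed to fit, as crypt_slow() does. */
	uint8_t buffer[bsize * 2 + alignmask];
	uint8_t *src = (uint8_t *)ALIGN(buffer, alignmask + 1);
	uint8_t *dst = src + bsize;

	printf("buffer=%p src=%p dst=%p, src %% %u = %u\n",
	       (void *)buffer, (void *)src, (void *)dst, alignmask + 1,
	       (unsigned int)((uintptr_t)src & alignmask));
	return 0;
}
```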
@@ -109,64 +113,144 @@ static int crypt(struct crypto_tfm *tfm,
 	scatterwalk_start(&walk_out, dst);
 
 	for(;;) {
-		u8 *src_p, *dst_p;
-		int in_place;
+		unsigned int n = nbytes;
+		u8 *tmp = NULL;
+
+		if (!scatterwalk_aligned(&walk_in, alignmask) ||
+		    !scatterwalk_aligned(&walk_out, alignmask)) {
+			if (!buffer) {
+				buffer = __get_free_page(GFP_ATOMIC);
+				if (!buffer)
+					n = 0;
+			}
+			tmp = (u8 *)buffer;
+		}
 
 		scatterwalk_map(&walk_in, 0);
 		scatterwalk_map(&walk_out, 1);
 
-		in_place = scatterwalk_samebuf(&walk_in, &walk_out);
-
-		do {
-			src_p = prepare_src(&walk_in, bsize, tmp_src,
-					    in_place);
-			dst_p = prepare_dst(&walk_out, bsize, tmp_dst,
-					    in_place);
-
-			prfn(tfm, dst_p, src_p, crfn, info);
+		n = scatterwalk_clamp(&walk_in, n);
+		n = scatterwalk_clamp(&walk_out, n);
 
-			complete_src(&walk_in, bsize, src_p, in_place);
-			complete_dst(&walk_out, bsize, dst_p, in_place);
+		if (likely(n >= bsize))
+			n = crypt_fast(desc, &walk_in, &walk_out, n, tmp);
+		else
+			n = crypt_slow(desc, &walk_in, &walk_out, bsize);
 
-			nbytes -= bsize;
-		} while (nbytes &&
-			 !scatterwalk_across_pages(&walk_in, bsize) &&
-			 !scatterwalk_across_pages(&walk_out, bsize));
+		nbytes -= n;
 
 		scatterwalk_done(&walk_in, 0, nbytes);
 		scatterwalk_done(&walk_out, 1, nbytes);
 
 		if (!nbytes)
-			return 0;
+			break;
 
 		crypto_yield(tfm);
 	}
+
+	if (buffer)
+		free_page(buffer);
+
+	return 0;
 }
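The rewritten main loop clamps n to the bytes contiguously mapped in both walks, hands whole spans to crypt_fast() (one prfn call per mapped region instead of one per block), and falls back to crypt_slow()'s gather/scatter bounce buffer only for a block straddling a page boundary. A simplified userspace model of that fast/slow split over a fragmented buffer; the segment layout and names are invented, and the total length is assumed to be a multiple of the block size:

```c
#include <stdio.h>
#include <stdint.h>
#include <string.h>

enum { BSIZE = 4 };

/* One fragment of a scattered buffer, loosely modelling a scatterlist entry. */
struct seg {
	uint8_t *data;
	unsigned int len;
};

static void walk(struct seg *segs, unsigned int nsegs,
		 void (*process)(uint8_t *block))
{
	unsigned int i = 0, off = 0;

	while (i < nsegs) {
		if (segs[i].len - off >= BSIZE) {
			process(segs[i].data + off);	/* "crypt_fast" */
			off += BSIZE;
		} else {
			uint8_t bounce[BSIZE];		/* "crypt_slow" */
			unsigned int j = i, o = off, got = 0;

			while (got < BSIZE) {		/* gather one block */
				unsigned int take = segs[j].len - o;
				if (take > BSIZE - got)
					take = BSIZE - got;
				memcpy(bounce + got, segs[j].data + o, take);
				got += take;
				o += take;
				if (o == segs[j].len) {
					j++;
					o = 0;
				}
			}
			process(bounce);
			got = 0;
			while (got < BSIZE) {		/* scatter it back */
				unsigned int put = segs[i].len - off;
				if (put > BSIZE - got)
					put = BSIZE - got;
				memcpy(segs[i].data + off, bounce + got, put);
				got += put;
				off += put;
				if (off == segs[i].len) {
					i++;
					off = 0;
				}
			}
			continue;
		}
		if (off == segs[i].len) {
			i++;
			off = 0;
		}
	}
}

static void negate(uint8_t *b)
{
	for (int k = 0; k < BSIZE; k++)
		b[k] = ~b[k];
}

int main(void)
{
	uint8_t a[6] = { 0, 1, 2, 3, 4, 5 }, b[2] = { 6, 7 };
	struct seg segs[2] = { { a, 6 }, { b, 2 } };

	walk(segs, 2, negate);	/* first block fast, second straddles: slow */
	walk(segs, 2, negate);	/* negating twice restores the input */
	printf("%d %d\n", a[0], b[1]);	/* 0 7 */
	return 0;
}
```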
 
-static void cbc_process_encrypt(struct crypto_tfm *tfm, u8 *dst, u8 *src,
-				cryptfn_t fn, void *info)
+static int crypt_iv_unaligned(struct cipher_desc *desc,
+			      struct scatterlist *dst,
+			      struct scatterlist *src,
+			      unsigned int nbytes)
 {
-	u8 *iv = info;
+	struct crypto_tfm *tfm = desc->tfm;
+	unsigned int alignmask = crypto_tfm_alg_alignmask(tfm);
+	u8 *iv = desc->info;
 
-	tfm->crt_u.cipher.cit_xor_block(iv, src);
-	fn(crypto_tfm_ctx(tfm), dst, iv);
-	memcpy(iv, dst, crypto_tfm_alg_blocksize(tfm));
+	if (unlikely(((unsigned long)iv & alignmask))) {
+		unsigned int ivsize = tfm->crt_cipher.cit_ivsize;
+		u8 buffer[ivsize + alignmask];
+		u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
+		int err;
+
+		desc->info = memcpy(tmp, iv, ivsize);
+		err = crypt(desc, dst, src, nbytes);
+		memcpy(iv, tmp, ivsize);
+
+		return err;
+	}
+
+	return crypt(desc, dst, src, nbytes);
 }
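crypt_iv_unaligned() pays for a copy only when the caller's IV misses the alignment mask: it runs the whole operation on an aligned stack copy, then writes the updated chaining value back so the caller still observes the final IV. A small sketch of that copy-in/operate/copy-back shape; chain_step() is an invented stand-in for the real chaining update:

```c
#include <stdio.h>
#include <stdint.h>
#include <string.h>

#define ALIGN(x, a)	((((uintptr_t)(x)) + ((a) - 1)) & ~((uintptr_t)(a) - 1))

/* Stand-in for the real work: advances the chaining value in place. */
static void chain_step(uint8_t *iv, unsigned int ivsize)
{
	for (unsigned int i = 0; i < ivsize; i++)
		iv[i] += 1;
}

/* Run chain_step() on an aligned copy if iv itself is misaligned,
 * then propagate the updated value back to the caller's buffer. */
static void step_iv_unaligned(uint8_t *iv, unsigned int ivsize,
			      unsigned int alignmask)
{
	if ((uintptr_t)iv & alignmask) {
		uint8_t buffer[ivsize + alignmask];
		uint8_t *tmp = (uint8_t *)ALIGN(buffer, alignmask + 1);

		memcpy(tmp, iv, ivsize);
		chain_step(tmp, ivsize);
		memcpy(iv, tmp, ivsize);	/* caller sees the new IV */
		return;
	}

	chain_step(iv, ivsize);
}

int main(void)
{
	uint8_t raw[9] = { 0 };
	uint8_t *iv = raw + 1;		/* deliberately odd address */

	step_iv_unaligned(iv, 8, 7);	/* pretend an 8-byte alignment rule */
	printf("%d\n", iv[0]);		/* 1 */
	return 0;
}
```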
 
-static void cbc_process_decrypt(struct crypto_tfm *tfm, u8 *dst, u8 *src,
-				cryptfn_t fn, void *info)
+static unsigned int cbc_process_encrypt(const struct cipher_desc *desc,
+					u8 *dst, const u8 *src,
+					unsigned int nbytes)
 {
-	u8 *iv = info;
+	struct crypto_tfm *tfm = desc->tfm;
+	void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
+	int bsize = crypto_tfm_alg_blocksize(tfm);
+
+	void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
+	u8 *iv = desc->info;
+	unsigned int done = 0;
+
+	do {
+		xor(iv, src);
+		fn(crypto_tfm_ctx(tfm), dst, iv);
+		memcpy(iv, dst, bsize);
 
-	fn(crypto_tfm_ctx(tfm), dst, src);
-	tfm->crt_u.cipher.cit_xor_block(dst, iv);
-	memcpy(iv, src, crypto_tfm_alg_blocksize(tfm));
+		src += bsize;
+		dst += bsize;
+	} while ((done += bsize) < nbytes);
+
+	return done;
 }
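cbc_process_encrypt() now walks all the blocks itself: XOR the IV into the plaintext, encrypt, and carry the ciphertext forward as the next IV. The same structure in a self-contained sketch, with an invented add-one "cipher" in place of a real one:

```c
#include <stdio.h>
#include <stdint.h>
#include <string.h>

enum { BSIZE = 4 };

static void xor_block(uint8_t *a, const uint8_t *b)
{
	for (int i = 0; i < BSIZE; i++)
		a[i] ^= b[i];
}

/* Toy block "cipher": add 1 to every byte (decryption would subtract). */
static void enc_block(uint8_t *dst, const uint8_t *src)
{
	for (int i = 0; i < BSIZE; i++)
		dst[i] = src[i] + 1;
}

/* CBC encryption over nbytes (a multiple of BSIZE), mirroring the
 * structure of cbc_process_encrypt(). */
static unsigned int cbc_encrypt_blocks(uint8_t *dst, const uint8_t *src,
				       unsigned int nbytes, uint8_t *iv)
{
	unsigned int done = 0;

	do {
		xor_block(iv, src);		/* iv ^= plaintext */
		enc_block(dst, iv);		/* dst = E(iv ^ plaintext) */
		memcpy(iv, dst, BSIZE);		/* chain the ciphertext */

		src += BSIZE;
		dst += BSIZE;
	} while ((done += BSIZE) < nbytes);

	return done;
}

int main(void)
{
	uint8_t pt[8] = { 0 };
	uint8_t ct[8], iv[BSIZE] = { 0 };

	cbc_encrypt_blocks(ct, pt, sizeof(pt), iv);
	/* identical plaintext blocks encrypt differently under CBC */
	printf("%d %d\n", ct[0], ct[4]);	/* 1 2 */
	return 0;
}
```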
 
-static void ecb_process(struct crypto_tfm *tfm, u8 *dst, u8 *src,
-			cryptfn_t fn, void *info)
+static unsigned int cbc_process_decrypt(const struct cipher_desc *desc,
+					u8 *dst, const u8 *src,
+					unsigned int nbytes)
 {
-	fn(crypto_tfm_ctx(tfm), dst, src);
+	struct crypto_tfm *tfm = desc->tfm;
+	void (*xor)(u8 *, const u8 *) = tfm->crt_u.cipher.cit_xor_block;
+	int bsize = crypto_tfm_alg_blocksize(tfm);
+
+	u8 stack[src == dst ? bsize : 0];
+	u8 *buf = stack;
+	u8 **dst_p = src == dst ? &buf : &dst;
+
+	void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
+	u8 *iv = desc->info;
+	unsigned int done = 0;
+
+	do {
+		u8 *tmp_dst = *dst_p;
+
+		fn(crypto_tfm_ctx(tfm), tmp_dst, src);
+		xor(tmp_dst, iv);
+		memcpy(iv, src, bsize);
+		if (tmp_dst != dst)
+			memcpy(dst, tmp_dst, bsize);
+
+		src += bsize;
+		dst += bsize;
+	} while ((done += bsize) < nbytes);
+
+	return done;
+}
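On the decrypt side the ciphertext block must be saved as the next IV before it can be overwritten, which only happens when src == dst; that is what the zero-or-one-block stack[] VLA and the dst_p indirection are for (the zero-size VLA is a GNU C-ism, as in the kernel source). A sketch of the same hazard and fix, continuing the toy cipher from the previous example:

```c
#include <stdio.h>
#include <stdint.h>
#include <string.h>

enum { BSIZE = 4 };

static void xor_block(uint8_t *a, const uint8_t *b)
{
	for (int i = 0; i < BSIZE; i++)
		a[i] ^= b[i];
}

/* Inverse of the toy "add 1 to every byte" cipher. */
static void dec_block(uint8_t *dst, const uint8_t *src)
{
	for (int i = 0; i < BSIZE; i++)
		dst[i] = src[i] - 1;
}

/* In-place-safe CBC decryption, mirroring cbc_process_decrypt():
 * decrypt into a one-block bounce buffer when src == dst so the
 * ciphertext is still intact when it becomes the next IV. */
static unsigned int cbc_decrypt_blocks(uint8_t *dst, const uint8_t *src,
				       unsigned int nbytes, uint8_t *iv)
{
	uint8_t stack[src == dst ? BSIZE : 0];	/* GNU C: may be size 0 */
	uint8_t *buf = stack;
	uint8_t **dst_p = src == dst ? &buf : &dst;
	unsigned int done = 0;

	do {
		uint8_t *tmp_dst = *dst_p;

		dec_block(tmp_dst, src);	/* tmp = D(ciphertext) */
		xor_block(tmp_dst, iv);		/* tmp ^= previous block */
		memcpy(iv, src, BSIZE);		/* save ciphertext as next IV */
		if (tmp_dst != dst)
			memcpy(dst, tmp_dst, BSIZE);

		src += BSIZE;
		dst += BSIZE;
	} while ((done += BSIZE) < nbytes);

	return done;
}

int main(void)
{
	uint8_t buf[8] = { 1, 1, 1, 1, 2, 2, 2, 2 };	/* CBC example output */
	uint8_t iv[BSIZE] = { 0 };

	cbc_decrypt_blocks(buf, buf, sizeof(buf), iv);	/* decrypt in place */
	printf("%d %d\n", buf[0], buf[4]);	/* 0 0 */
	return 0;
}
```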
+
+static unsigned int ecb_process(const struct cipher_desc *desc, u8 *dst,
+				const u8 *src, unsigned int nbytes)
+{
+	struct crypto_tfm *tfm = desc->tfm;
+	int bsize = crypto_tfm_alg_blocksize(tfm);
+	void (*fn)(void *, u8 *, const u8 *) = desc->crfn;
+	unsigned int done = 0;
+
+	do {
+		fn(crypto_tfm_ctx(tfm), dst, src);
+
+		src += bsize;
+		dst += bsize;
+	} while ((done += bsize) < nbytes);
+
+	return done;
 }
 
 static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
@@ -185,9 +269,14 @@ static int ecb_encrypt(struct crypto_tfm *tfm,
 			struct scatterlist *dst,
 			struct scatterlist *src, unsigned int nbytes)
 {
-	return crypt(tfm, dst, src, nbytes,
-			tfm->__crt_alg->cra_cipher.cia_encrypt,
-			ecb_process, NULL);
+	struct cipher_desc desc;
+	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
+
+	desc.tfm = tfm;
+	desc.crfn = cipher->cia_encrypt;
+	desc.prfn = cipher->cia_encrypt_ecb ?: ecb_process;
+
+	return crypt(&desc, dst, src, nbytes);
 }
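cipher->cia_encrypt_ecb ?: ecb_process uses the GNU C conditional with omitted middle operand: a ?: b yields a unless a is zero/NULL, evaluating a only once. So an algorithm that supplies a multi-block ECB routine wins, and the generic per-block walker is the fallback. A tiny demonstration (GCC/Clang extension; fast_impl and generic_impl are invented):

```c
#include <stdio.h>

static int generic_impl(int x) { return x + 1; }
static int fast_impl(int x)    { return x + 1; }

int main(void)
{
	int (*provided)(int) = NULL;	/* algorithm offers nothing */
	int (*chosen)(int);

	/* GNU "Elvis" operator: use the left side unless it is NULL. */
	chosen = provided ?: generic_impl;
	printf("%s\n", chosen == generic_impl ? "generic" : "fast");

	provided = fast_impl;		/* algorithm supplies a fast path */
	chosen = provided ?: generic_impl;
	printf("%s\n", chosen == fast_impl ? "fast" : "generic");
	return 0;
}
```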
 
 static int ecb_decrypt(struct crypto_tfm *tfm,
@@ -195,9 +284,14 @@ static int ecb_decrypt(struct crypto_tfm *tfm,
 		       struct scatterlist *src,
 		       unsigned int nbytes)
 {
-	return crypt(tfm, dst, src, nbytes,
-			tfm->__crt_alg->cra_cipher.cia_decrypt,
-			ecb_process, NULL);
+	struct cipher_desc desc;
+	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
+
+	desc.tfm = tfm;
+	desc.crfn = cipher->cia_decrypt;
+	desc.prfn = cipher->cia_decrypt_ecb ?: ecb_process;
+
+	return crypt(&desc, dst, src, nbytes);
 }
 
 static int cbc_encrypt(struct crypto_tfm *tfm,
@@ -205,9 +299,15 @@ static int cbc_encrypt(struct crypto_tfm *tfm,
 		       struct scatterlist *src,
 		       unsigned int nbytes)
 {
-	return crypt(tfm, dst, src, nbytes,
-			tfm->__crt_alg->cra_cipher.cia_encrypt,
-			cbc_process_encrypt, tfm->crt_cipher.cit_iv);
+	struct cipher_desc desc;
+	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
+
+	desc.tfm = tfm;
+	desc.crfn = cipher->cia_encrypt;
+	desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
+	desc.info = tfm->crt_cipher.cit_iv;
+
+	return crypt(&desc, dst, src, nbytes);
 }
 
 static int cbc_encrypt_iv(struct crypto_tfm *tfm,
@@ -215,9 +315,15 @@ static int cbc_encrypt_iv(struct crypto_tfm *tfm,
 			  struct scatterlist *src,
 			  unsigned int nbytes, u8 *iv)
 {
-	return crypt(tfm, dst, src, nbytes,
-			tfm->__crt_alg->cra_cipher.cia_encrypt,
-			cbc_process_encrypt, iv);
+	struct cipher_desc desc;
+	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
+
+	desc.tfm = tfm;
+	desc.crfn = cipher->cia_encrypt;
+	desc.prfn = cipher->cia_encrypt_cbc ?: cbc_process_encrypt;
+	desc.info = iv;
+
+	return crypt_iv_unaligned(&desc, dst, src, nbytes);
 }
 
 static int cbc_decrypt(struct crypto_tfm *tfm,
@@ -225,9 +331,15 @@ static int cbc_decrypt(struct crypto_tfm *tfm,
 		       struct scatterlist *src,
 		       unsigned int nbytes)
 {
-	return crypt(tfm, dst, src, nbytes,
-			tfm->__crt_alg->cra_cipher.cia_decrypt,
-			cbc_process_decrypt, tfm->crt_cipher.cit_iv);
+	struct cipher_desc desc;
+	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
+
+	desc.tfm = tfm;
+	desc.crfn = cipher->cia_decrypt;
+	desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
+	desc.info = tfm->crt_cipher.cit_iv;
+
+	return crypt(&desc, dst, src, nbytes);
 }
 
 static int cbc_decrypt_iv(struct crypto_tfm *tfm,
@@ -235,9 +347,15 @@ static int cbc_decrypt_iv(struct crypto_tfm *tfm,
 			  struct scatterlist *src,
 			  unsigned int nbytes, u8 *iv)
 {
-	return crypt(tfm, dst, src, nbytes,
-			tfm->__crt_alg->cra_cipher.cia_decrypt,
-			cbc_process_decrypt, iv);
+	struct cipher_desc desc;
+	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
+
+	desc.tfm = tfm;
+	desc.crfn = cipher->cia_decrypt;
+	desc.prfn = cipher->cia_decrypt_cbc ?: cbc_process_decrypt;
+	desc.info = iv;
+
+	return crypt_iv_unaligned(&desc, dst, src, nbytes);
 }
 
 static int nocrypt(struct crypto_tfm *tfm,
@@ -306,6 +424,8 @@ int crypto_init_cipher_ops(struct crypto_tfm *tfm)
 	}
 
 	if (ops->cit_mode == CRYPTO_TFM_MODE_CBC) {
+		unsigned int align;
+		unsigned long addr;
 
 		switch (crypto_tfm_alg_blocksize(tfm)) {
 		case 8:
@@ -325,9 +445,11 @@ int crypto_init_cipher_ops(struct crypto_tfm *tfm)
 		}
 
 		ops->cit_ivsize = crypto_tfm_alg_blocksize(tfm);
-		ops->cit_iv = kmalloc(ops->cit_ivsize, GFP_KERNEL);
-		if (ops->cit_iv == NULL)
-			ret = -ENOMEM;
+		align = crypto_tfm_alg_alignmask(tfm) + 1;
+		addr = (unsigned long)crypto_tfm_ctx(tfm);
+		addr = ALIGN(addr, align);
+		addr += ALIGN(tfm->__crt_alg->cra_ctxsize, align);
+		ops->cit_iv = (void *)addr;
 	}
 
 out:
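cit_iv no longer comes from a separate kmalloc(): it now points into the slack after the tfm's aligned context area, which is presumably sized to match by the allocation path elsewhere in this patch series; that is also why crypto_exit_cipher_ops() below loses its kfree(). A userspace model of just the address arithmetic, with invented sizes:

```c
#include <stdio.h>
#include <stdint.h>
#include <stdlib.h>

#define ALIGN(x, a)	((((uintptr_t)(x)) + ((a) - 1)) & ~((uintptr_t)(a) - 1))

int main(void)
{
	unsigned int align = 16;	/* alignmask 15 + 1 */
	unsigned int ctxsize = 24;	/* pretend cra_ctxsize */
	unsigned int ivsize = 16;

	/* One allocation holds the context plus the IV tail; size it the
	 * way the init code will later compute the IV address. */
	void *tfm = malloc(ALIGN(ctxsize, align) + ivsize + align);

	uintptr_t addr = (uintptr_t)tfm;	/* stand-in for crypto_tfm_ctx() */
	addr = ALIGN(addr, align);		/* align the context start */
	addr += ALIGN(ctxsize, align);		/* skip the padded context */
	void *cit_iv = (void *)addr;

	printf("iv at %p, offset %ju from tfm\n",
	       cit_iv, (uintmax_t)(addr - (uintptr_t)tfm));
	free(tfm);
	return 0;
}
```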
@@ -336,6 +458,4 @@ out:
 
 void crypto_exit_cipher_ops(struct crypto_tfm *tfm)
 {
-	if (tfm->crt_cipher.cit_iv)
-		kfree(tfm->crt_cipher.cit_iv);
 }