aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--crypto/Kconfig4
-rw-r--r--crypto/Makefile2
-rw-r--r--crypto/blkcipher.c405
-rw-r--r--include/crypto/algapi.h65
-rw-r--r--include/linux/crypto.h179
5 files changed, 655 insertions, 0 deletions
diff --git a/crypto/Kconfig b/crypto/Kconfig
index 4ce509dba329..68790ad7308d 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -16,6 +16,10 @@ config CRYPTO_ALGAPI
16 help 16 help
17 This option provides the API for cryptographic algorithms. 17 This option provides the API for cryptographic algorithms.
18 18
19config CRYPTO_BLKCIPHER
20 tristate
21 select CRYPTO_ALGAPI
22
19config CRYPTO_MANAGER 23config CRYPTO_MANAGER
20 tristate "Cryptographic algorithm manager" 24 tristate "Cryptographic algorithm manager"
21 select CRYPTO_ALGAPI 25 select CRYPTO_ALGAPI
diff --git a/crypto/Makefile b/crypto/Makefile
index b8745f3d3595..b5051951c636 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -8,6 +8,8 @@ crypto_algapi-$(CONFIG_PROC_FS) += proc.o
8crypto_algapi-objs := algapi.o $(crypto_algapi-y) 8crypto_algapi-objs := algapi.o $(crypto_algapi-y)
9obj-$(CONFIG_CRYPTO_ALGAPI) += crypto_algapi.o 9obj-$(CONFIG_CRYPTO_ALGAPI) += crypto_algapi.o
10 10
11obj-$(CONFIG_CRYPTO_BLKCIPHER) += blkcipher.o
12
11obj-$(CONFIG_CRYPTO_MANAGER) += cryptomgr.o 13obj-$(CONFIG_CRYPTO_MANAGER) += cryptomgr.o
12obj-$(CONFIG_CRYPTO_HMAC) += hmac.o 14obj-$(CONFIG_CRYPTO_HMAC) += hmac.o
13obj-$(CONFIG_CRYPTO_NULL) += crypto_null.o 15obj-$(CONFIG_CRYPTO_NULL) += crypto_null.o
diff --git a/crypto/blkcipher.c b/crypto/blkcipher.c
new file mode 100644
index 000000000000..034c939bf91a
--- /dev/null
+++ b/crypto/blkcipher.c
@@ -0,0 +1,405 @@
1/*
2 * Block chaining cipher operations.
3 *
4 * Generic encrypt/decrypt wrapper for ciphers, handles operations across
5 * multiple page boundaries by using temporary blocks. In user context,
6 * the kernel is given a chance to schedule us once per page.
7 *
8 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
9 *
10 * This program is free software; you can redistribute it and/or modify it
11 * under the terms of the GNU General Public License as published by the Free
12 * Software Foundation; either version 2 of the License, or (at your option)
13 * any later version.
14 *
15 */
16
17#include <linux/crypto.h>
18#include <linux/errno.h>
19#include <linux/kernel.h>
20#include <linux/io.h>
21#include <linux/module.h>
22#include <linux/scatterlist.h>
23#include <linux/seq_file.h>
24#include <linux/slab.h>
25#include <linux/string.h>
26
27#include "internal.h"
28#include "scatterwalk.h"
29
/*
 * Per-walk state bits kept in blkcipher_walk.flags.
 */
enum {
	BLKCIPHER_WALK_PHYS = 1 << 0,	/* caller wants physical page/offset pairs */
	BLKCIPHER_WALK_SLOW = 1 << 1,	/* block bounced through walk->buffer */
	BLKCIPHER_WALK_COPY = 1 << 2,	/* unaligned data staged in walk->page */
	BLKCIPHER_WALK_DIFF = 1 << 3,	/* src and dst mapped separately */
};
36
37static int blkcipher_walk_next(struct blkcipher_desc *desc,
38 struct blkcipher_walk *walk);
39static int blkcipher_walk_first(struct blkcipher_desc *desc,
40 struct blkcipher_walk *walk);
41
/* Map the current source chunk and cache its virtual address. */
static inline void blkcipher_map_src(struct blkcipher_walk *walk)
{
	walk->src.virt.addr = scatterwalk_map(&walk->in, 0);
}
46
/* Map the current destination chunk and cache its virtual address. */
static inline void blkcipher_map_dst(struct blkcipher_walk *walk)
{
	walk->dst.virt.addr = scatterwalk_map(&walk->out, 1);
}
51
/* Undo blkcipher_map_src() for the current source chunk. */
static inline void blkcipher_unmap_src(struct blkcipher_walk *walk)
{
	scatterwalk_unmap(walk->src.virt.addr, 0);
}
56
/* Undo blkcipher_map_dst() for the current destination chunk. */
static inline void blkcipher_unmap_dst(struct blkcipher_walk *walk)
{
	scatterwalk_unmap(walk->dst.virt.addr, 1);
}
61
62static inline u8 *blkcipher_get_spot(u8 *start, unsigned int len)
63{
64 if (offset_in_page(start + len) < len)
65 return (u8 *)((unsigned long)(start + len) & PAGE_MASK);
66 return start;
67}
68
/*
 * Finish a slow (bounce-buffered) step: recompute where the dst block
 * was placed in walk->buffer (same computation as blkcipher_next_slow())
 * and scatter it back out to the destination scatterlist.
 * @tfm is only needed for its alignmask.  Returns the block size, i.e.
 * the number of bytes processed in this step.
 */
static inline unsigned int blkcipher_done_slow(struct crypto_blkcipher *tfm,
					       struct blkcipher_walk *walk,
					       unsigned int bsize)
{
	u8 *addr;
	unsigned int alignmask = crypto_blkcipher_alignmask(tfm);

	addr = (u8 *)ALIGN((unsigned long)walk->buffer, alignmask + 1);
	addr = blkcipher_get_spot(addr, bsize);
	scatterwalk_copychunks(addr, &walk->out, bsize, 1);
	return bsize;
}
81
/*
 * Finish a fast step.  @n is the number of bytes the cipher left
 * unprocessed; the function converts it to the number of bytes consumed,
 * writes back any data staged in walk->page (COPY mode), unmaps the
 * pages that were mapped for this step, and advances both cursors.
 * Returns the number of bytes consumed.
 */
static inline unsigned int blkcipher_done_fast(struct blkcipher_walk *walk,
					       unsigned int n)
{
	n = walk->nbytes - n;

	if (walk->flags & BLKCIPHER_WALK_COPY) {
		/* Data was transformed in the bounce page; copy it out. */
		blkcipher_map_dst(walk);
		memcpy(walk->dst.virt.addr, walk->page, n);
		blkcipher_unmap_dst(walk);
	} else if (!(walk->flags & BLKCIPHER_WALK_PHYS)) {
		/* Virtual walk mapped src (and dst if they differed). */
		blkcipher_unmap_src(walk);
		if (walk->flags & BLKCIPHER_WALK_DIFF)
			blkcipher_unmap_dst(walk);
	}

	scatterwalk_advance(&walk->in, n);
	scatterwalk_advance(&walk->out, n);

	return n;
}
102
/*
 * Complete one step of the walk.  @err is the cipher's result for this
 * step: >= 0 means success with @err bytes left unprocessed, < 0 is an
 * error that aborts the walk.  Either continues with the next step or
 * tears the walk down (write back IV, free scratch memory) and returns
 * the final status.
 */
int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	unsigned int nbytes = 0;

	if (likely(err >= 0)) {
		unsigned int bsize = crypto_blkcipher_blocksize(tfm);
		unsigned int n;

		if (likely(!(walk->flags & BLKCIPHER_WALK_SLOW)))
			n = blkcipher_done_fast(walk, err);
		else
			n = blkcipher_done_slow(tfm, walk, bsize);

		/* n is the bytes consumed this step. */
		nbytes = walk->total - n;
		err = 0;
	}

	/* nbytes == 0 on error: tells scatterwalk the walk is over. */
	scatterwalk_done(&walk->in, 0, nbytes);
	scatterwalk_done(&walk->out, 1, nbytes);

	walk->total = nbytes;
	walk->nbytes = nbytes;

	if (nbytes) {
		/* More to do: maybe reschedule, then set up the next step. */
		crypto_yield(desc->flags);
		return blkcipher_walk_next(desc, walk);
	}

	/* Walk finished: propagate the (possibly re-homed) IV back. */
	if (walk->iv != desc->info)
		memcpy(desc->info, walk->iv, crypto_blkcipher_ivsize(tfm));
	/* buffer may alias page (see blkcipher_next_slow); free each once. */
	if (walk->buffer != walk->page)
		kfree(walk->buffer);
	if (walk->page)
		free_page((unsigned long)walk->page);

	return err;
}
EXPORT_SYMBOL_GPL(blkcipher_walk_done);
143
144static inline int blkcipher_next_slow(struct blkcipher_desc *desc,
145 struct blkcipher_walk *walk,
146 unsigned int bsize,
147 unsigned int alignmask)
148{
149 unsigned int n;
150
151 if (walk->buffer)
152 goto ok;
153
154 walk->buffer = walk->page;
155 if (walk->buffer)
156 goto ok;
157
158 n = bsize * 2 + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
159 walk->buffer = kmalloc(n, GFP_ATOMIC);
160 if (!walk->buffer)
161 return blkcipher_walk_done(desc, walk, -ENOMEM);
162
163ok:
164 walk->dst.virt.addr = (u8 *)ALIGN((unsigned long)walk->buffer,
165 alignmask + 1);
166 walk->dst.virt.addr = blkcipher_get_spot(walk->dst.virt.addr, bsize);
167 walk->src.virt.addr = blkcipher_get_spot(walk->dst.virt.addr + bsize,
168 bsize);
169
170 scatterwalk_copychunks(walk->src.virt.addr, &walk->in, bsize, 0);
171
172 walk->nbytes = bsize;
173 walk->flags |= BLKCIPHER_WALK_SLOW;
174
175 return 0;
176}
177
/*
 * Copy path: src/dst are contiguous but not sufficiently aligned.
 * Stage the source data in the pre-allocated bounce page and run the
 * cipher in place there; blkcipher_done_fast() copies the result out.
 */
static inline int blkcipher_next_copy(struct blkcipher_walk *walk)
{
	u8 *tmp = walk->page;

	blkcipher_map_src(walk);
	memcpy(tmp, walk->src.virt.addr, walk->nbytes);
	blkcipher_unmap_src(walk);

	/* Cipher operates in place on the bounce page. */
	walk->src.virt.addr = tmp;
	walk->dst.virt.addr = tmp;

	return 0;
}
191
/*
 * Fast path: aligned, contiguous data.  Record the physical locations;
 * for virtual walks additionally map the pages, sharing one mapping
 * when src and dst refer to the same place.
 */
static inline int blkcipher_next_fast(struct blkcipher_desc *desc,
				      struct blkcipher_walk *walk)
{
	unsigned long diff;

	walk->src.phys.page = scatterwalk_page(&walk->in);
	walk->src.phys.offset = offset_in_page(walk->in.offset);
	walk->dst.phys.page = scatterwalk_page(&walk->out);
	walk->dst.phys.offset = offset_in_page(walk->out.offset);

	if (walk->flags & BLKCIPHER_WALK_PHYS)
		return 0;

	/* Nonzero iff src and dst differ in page or offset; virt.page
	 * aliases phys.page through the union in blkcipher_walk. */
	diff = walk->src.phys.offset - walk->dst.phys.offset;
	diff |= walk->src.virt.page - walk->dst.virt.page;

	blkcipher_map_src(walk);
	walk->dst.virt.addr = walk->src.virt.addr;

	if (diff) {
		walk->flags |= BLKCIPHER_WALK_DIFF;
		blkcipher_map_dst(walk);
	}

	return 0;
}
218
/*
 * Set up the next step of the walk, choosing a strategy:
 *  - less than one block remaining overall -> -EINVAL (bad block length)
 *  - less than one contiguous block        -> bounce buffer (next_slow)
 *  - misaligned src/dst                    -> copy via walk->page (next_copy)
 *  - otherwise                             -> direct mapping (next_fast)
 */
static int blkcipher_walk_next(struct blkcipher_desc *desc,
			       struct blkcipher_walk *walk)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	unsigned int alignmask = crypto_blkcipher_alignmask(tfm);
	unsigned int bsize = crypto_blkcipher_blocksize(tfm);
	unsigned int n;
	int err;

	n = walk->total;
	if (unlikely(n < bsize)) {
		desc->flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
		return blkcipher_walk_done(desc, walk, -EINVAL);
	}

	walk->flags &= ~(BLKCIPHER_WALK_SLOW | BLKCIPHER_WALK_COPY |
			 BLKCIPHER_WALK_DIFF);
	if (!scatterwalk_aligned(&walk->in, alignmask) ||
	    !scatterwalk_aligned(&walk->out, alignmask)) {
		walk->flags |= BLKCIPHER_WALK_COPY;
		if (!walk->page) {
			/* May run in atomic context, hence GFP_ATOMIC. */
			walk->page = (void *)__get_free_page(GFP_ATOMIC);
			if (!walk->page)
				n = 0;	/* force the slow path below */
		}
	}

	n = scatterwalk_clamp(&walk->in, n);
	n = scatterwalk_clamp(&walk->out, n);

	if (unlikely(n < bsize)) {
		err = blkcipher_next_slow(desc, walk, bsize, alignmask);
		goto set_phys_lowmem;
	}

	walk->nbytes = n;
	if (walk->flags & BLKCIPHER_WALK_COPY) {
		err = blkcipher_next_copy(walk);
		goto set_phys_lowmem;
	}

	return blkcipher_next_fast(desc, walk);

set_phys_lowmem:
	/* Slow/copy paths yield lowmem virtual addresses; translate them
	 * back to page/offset for callers that asked for a phys walk. */
	if (walk->flags & BLKCIPHER_WALK_PHYS) {
		walk->src.phys.page = virt_to_page(walk->src.virt.addr);
		walk->dst.phys.page = virt_to_page(walk->dst.virt.addr);
		walk->src.phys.offset &= PAGE_SIZE - 1;
		walk->dst.phys.offset &= PAGE_SIZE - 1;
	}
	return err;
}
271
272static inline int blkcipher_copy_iv(struct blkcipher_walk *walk,
273 struct crypto_blkcipher *tfm,
274 unsigned int alignmask)
275{
276 unsigned bs = crypto_blkcipher_blocksize(tfm);
277 unsigned int ivsize = crypto_blkcipher_ivsize(tfm);
278 unsigned int size = bs * 2 + ivsize + max(bs, ivsize) - (alignmask + 1);
279 u8 *iv;
280
281 size += alignmask & ~(crypto_tfm_ctx_alignment() - 1);
282 walk->buffer = kmalloc(size, GFP_ATOMIC);
283 if (!walk->buffer)
284 return -ENOMEM;
285
286 iv = (u8 *)ALIGN((unsigned long)walk->buffer, alignmask + 1);
287 iv = blkcipher_get_spot(iv, bs) + bs;
288 iv = blkcipher_get_spot(iv, bs) + bs;
289 iv = blkcipher_get_spot(iv, ivsize);
290
291 walk->iv = memcpy(iv, walk->iv, ivsize);
292 return 0;
293}
294
/*
 * Begin a walk that yields mapped virtual addresses
 * (walk->src/dst.virt.addr) for each step.
 */
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk)
{
	walk->flags &= ~BLKCIPHER_WALK_PHYS;
	return blkcipher_walk_first(desc, walk);
}
EXPORT_SYMBOL_GPL(blkcipher_walk_virt);
302
/*
 * Begin a walk that yields physical page/offset pairs
 * (walk->src/dst.phys) for each step.
 */
int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk)
{
	walk->flags |= BLKCIPHER_WALK_PHYS;
	return blkcipher_walk_first(desc, walk);
}
EXPORT_SYMBOL_GPL(blkcipher_walk_phys);
310
/*
 * Common walk start-up: take the IV from desc->info (re-homing it to an
 * aligned buffer if necessary), initialise the scatterlist cursors, and
 * set up the first step.  Returns 0 immediately for an empty request.
 */
static int blkcipher_walk_first(struct blkcipher_desc *desc,
				struct blkcipher_walk *walk)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	unsigned int alignmask = crypto_blkcipher_alignmask(tfm);

	walk->nbytes = walk->total;
	if (unlikely(!walk->total))
		return 0;

	walk->buffer = NULL;
	walk->iv = desc->info;
	/* Copy the IV aside if the caller's buffer is misaligned. */
	if (unlikely(((unsigned long)walk->iv & alignmask))) {
		int err = blkcipher_copy_iv(walk, tfm, alignmask);
		if (err)
			return err;
	}

	scatterwalk_start(&walk->in, walk->in.sg);
	scatterwalk_start(&walk->out, walk->out.sg);
	walk->page = NULL;

	return blkcipher_walk_next(desc, walk);
}
335
/*
 * Default ->setkey hook installed by crypto_init_blkcipher_ops():
 * range-check the key length against the algorithm's declared bounds,
 * then hand off to the algorithm's own setkey.
 * NOTE(review): no key-buffer alignment handling here — presumably the
 * algorithm copes with unaligned keys; confirm for alignmask users.
 */
static int setkey(struct crypto_tfm *tfm, const u8 *key,
		  unsigned int keylen)
{
	struct blkcipher_alg *cipher = &tfm->__crt_alg->cra_blkcipher;

	if (keylen < cipher->min_keysize || keylen > cipher->max_keysize) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	return cipher->setkey(tfm, key, keylen);
}
348
/*
 * Total context size for a blkcipher tfm: the algorithm context plus,
 * when the mode carries an IV, room for it at an alignmask-aligned
 * offset.  The layout must match crypto_init_blkcipher_ops(), which
 * derives the IV pointer the same way.
 */
static unsigned int crypto_blkcipher_ctxsize(struct crypto_alg *alg)
{
	struct blkcipher_alg *cipher = &alg->cra_blkcipher;
	unsigned int len = alg->cra_ctxsize;

	if (cipher->ivsize) {
		len = ALIGN(len, (unsigned long)alg->cra_alignmask + 1);
		len += cipher->ivsize;
	}

	return len;
}
361
/*
 * Initialise the runtime ops (crt_blkcipher) for a new blkcipher tfm
 * and locate the IV storage appended to the context area (sized by
 * crypto_blkcipher_ctxsize()).  Rejects oversized IVs as a sanity check.
 */
static int crypto_init_blkcipher_ops(struct crypto_tfm *tfm)
{
	struct blkcipher_tfm *crt = &tfm->crt_blkcipher;
	struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;
	unsigned long align = crypto_tfm_alg_alignmask(tfm) + 1;
	unsigned long addr;

	if (alg->ivsize > PAGE_SIZE / 8)
		return -EINVAL;

	crt->setkey = setkey;
	crt->encrypt = alg->encrypt;
	crt->decrypt = alg->decrypt;

	/* IV lives just past the aligned algorithm context. */
	addr = (unsigned long)crypto_tfm_ctx(tfm);
	addr = ALIGN(addr, align);
	addr += ALIGN(tfm->__crt_alg->cra_ctxsize, align);
	crt->iv = (void *)addr;

	return 0;
}
383
/*
 * /proc/crypto report for a blkcipher algorithm.  Referenced only via
 * crypto_blkcipher_type under CONFIG_PROC_FS; the used attribute keeps
 * the compiler from discarding it otherwise.
 */
static void crypto_blkcipher_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute_used__;
static void crypto_blkcipher_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : blkcipher\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "min keysize  : %u\n", alg->cra_blkcipher.min_keysize);
	seq_printf(m, "max keysize  : %u\n", alg->cra_blkcipher.max_keysize);
	seq_printf(m, "ivsize       : %u\n", alg->cra_blkcipher.ivsize);
}
394
/*
 * Type glue hooking blkcipher algorithms into the crypto core:
 * context sizing, tfm initialisation, and /proc/crypto reporting.
 */
const struct crypto_type crypto_blkcipher_type = {
	.ctxsize = crypto_blkcipher_ctxsize,
	.init = crypto_init_blkcipher_ops,
#ifdef CONFIG_PROC_FS
	.show = crypto_blkcipher_show,
#endif
};
EXPORT_SYMBOL_GPL(crypto_blkcipher_type);
403
404MODULE_LICENSE("GPL");
405MODULE_DESCRIPTION("Generic block chaining cipher type");
diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h
index f21ae672e8a8..f3946baf0c07 100644
--- a/include/crypto/algapi.h
+++ b/include/crypto/algapi.h
@@ -55,6 +55,34 @@ struct scatter_walk {
55 unsigned int offset; 55 unsigned int offset;
56}; 56};
57 57
/*
 * Cursor state for stepping through src/dst scatterlists one cipher
 * block (or contiguous, aligned run) at a time.  See crypto/blkcipher.c
 * for the walk implementation and the BLKCIPHER_WALK_* flag values.
 */
struct blkcipher_walk {
	union {
		struct {
			struct page *page;	/* current page (phys walks) */
			unsigned long offset;	/* offset within that page */
		} phys;

		struct {
			u8 *page;	/* aliases phys.page (same storage) */
			u8 *addr;	/* mapped address of current chunk */
		} virt;
	} src, dst;

	struct scatter_walk in;		/* source scatterlist cursor */
	unsigned int nbytes;		/* bytes available in this step */

	struct scatter_walk out;	/* destination scatterlist cursor */
	unsigned int total;		/* bytes remaining in the request */

	void *page;			/* bounce page for unaligned data */
	u8 *buffer;			/* kmalloc'd scratch (slow path/IV) */
	u8 *iv;				/* IV, possibly re-homed when unaligned */

	int flags;			/* BLKCIPHER_WALK_* state bits */
};
83
84extern const struct crypto_type crypto_blkcipher_type;
85
58int crypto_register_template(struct crypto_template *tmpl); 86int crypto_register_template(struct crypto_template *tmpl);
59void crypto_unregister_template(struct crypto_template *tmpl); 87void crypto_unregister_template(struct crypto_template *tmpl);
60struct crypto_template *crypto_lookup_template(const char *name); 88struct crypto_template *crypto_lookup_template(const char *name);
@@ -69,15 +97,52 @@ struct crypto_alg *crypto_get_attr_alg(void *param, unsigned int len,
69struct crypto_instance *crypto_alloc_instance(const char *name, 97struct crypto_instance *crypto_alloc_instance(const char *name,
70 struct crypto_alg *alg); 98 struct crypto_alg *alg);
71 99
100int blkcipher_walk_done(struct blkcipher_desc *desc,
101 struct blkcipher_walk *walk, int err);
102int blkcipher_walk_virt(struct blkcipher_desc *desc,
103 struct blkcipher_walk *walk);
104int blkcipher_walk_phys(struct blkcipher_desc *desc,
105 struct blkcipher_walk *walk);
106
/*
 * Return the tfm's context pointer rounded up to the algorithm's
 * required alignment.
 *
 * Fix: cra_alignmask is a mask (alignment - 1), but ALIGN() expects the
 * alignment itself — everywhere else in this API ALIGN() is given
 * "alignmask + 1" (see crypto/blkcipher.c).  Passing the raw mask
 * rounded to the wrong boundary.  Use mask + 1, and skip the work when
 * the allocator's natural context alignment already suffices.
 */
static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	unsigned long addr = (unsigned long)crypto_tfm_ctx(tfm);
	unsigned long align = crypto_tfm_alg_alignmask(tfm) + 1;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (void *)ALIGN(addr, align);
}
116
/* Return the private context area of a template instance. */
static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}
76 121
/* Return the algorithm's context area for a blkcipher handle. */
static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

/* Same as above, rounded up to the algorithm's alignment requirement. */
static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}
131
/* Return the cipher_alg union member for a simple cipher handle. */
static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}
81 136
/*
 * Prepare a blkcipher_walk for a request of @nbytes from @src to @dst.
 * Only seeds the scatterlists and total; the remaining fields are set
 * up by blkcipher_walk_virt()/blkcipher_walk_phys().
 */
static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
				       struct scatterlist *dst,
				       struct scatterlist *src,
				       unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
}
146
82#endif /* _CRYPTO_ALGAPI_H */ 147#endif /* _CRYPTO_ALGAPI_H */
83 148
diff --git a/include/linux/crypto.h b/include/linux/crypto.h
index fdecee83878c..5a5466d518e8 100644
--- a/include/linux/crypto.h
+++ b/include/linux/crypto.h
@@ -32,6 +32,7 @@
32#define CRYPTO_ALG_TYPE_MASK 0x0000000f 32#define CRYPTO_ALG_TYPE_MASK 0x0000000f
33#define CRYPTO_ALG_TYPE_CIPHER 0x00000001 33#define CRYPTO_ALG_TYPE_CIPHER 0x00000001
34#define CRYPTO_ALG_TYPE_DIGEST 0x00000002 34#define CRYPTO_ALG_TYPE_DIGEST 0x00000002
35#define CRYPTO_ALG_TYPE_BLKCIPHER 0x00000003
35#define CRYPTO_ALG_TYPE_COMPRESS 0x00000004 36#define CRYPTO_ALG_TYPE_COMPRESS 0x00000004
36 37
37#define CRYPTO_ALG_LARVAL 0x00000010 38#define CRYPTO_ALG_LARVAL 0x00000010
@@ -89,9 +90,16 @@
89#endif 90#endif
90 91
91struct scatterlist; 92struct scatterlist;
93struct crypto_blkcipher;
92struct crypto_tfm; 94struct crypto_tfm;
93struct crypto_type; 95struct crypto_type;
94 96
/* Per-operation state handed to blkcipher encrypt/decrypt. */
struct blkcipher_desc {
	struct crypto_blkcipher *tfm;	/* cipher handle */
	void *info;			/* IV for this operation */
	u32 flags;			/* request flags (passed to crypto_yield()) */
};
102
95struct cipher_desc { 103struct cipher_desc {
96 struct crypto_tfm *tfm; 104 struct crypto_tfm *tfm;
97 void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src); 105 void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
@@ -104,6 +112,21 @@ struct cipher_desc {
104 * Algorithms: modular crypto algorithm implementations, managed 112 * Algorithms: modular crypto algorithm implementations, managed
105 * via crypto_register_alg() and crypto_unregister_alg(). 113 * via crypto_register_alg() and crypto_unregister_alg().
106 */ 114 */
/* Operations and parameters a block-chaining cipher algorithm provides. */
struct blkcipher_alg {
	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes);
	int (*decrypt)(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes);

	unsigned int min_keysize;	/* smallest accepted key, bytes */
	unsigned int max_keysize;	/* largest accepted key, bytes */
	unsigned int ivsize;		/* IV size in bytes; 0 if no IV */
};
129
107struct cipher_alg { 130struct cipher_alg {
108 unsigned int cia_min_keysize; 131 unsigned int cia_min_keysize;
109 unsigned int cia_max_keysize; 132 unsigned int cia_max_keysize;
@@ -143,6 +166,7 @@ struct compress_alg {
143 unsigned int slen, u8 *dst, unsigned int *dlen); 166 unsigned int slen, u8 *dst, unsigned int *dlen);
144}; 167};
145 168
169#define cra_blkcipher cra_u.blkcipher
146#define cra_cipher cra_u.cipher 170#define cra_cipher cra_u.cipher
147#define cra_digest cra_u.digest 171#define cra_digest cra_u.digest
148#define cra_compress cra_u.compress 172#define cra_compress cra_u.compress
@@ -165,6 +189,7 @@ struct crypto_alg {
165 const struct crypto_type *cra_type; 189 const struct crypto_type *cra_type;
166 190
167 union { 191 union {
192 struct blkcipher_alg blkcipher;
168 struct cipher_alg cipher; 193 struct cipher_alg cipher;
169 struct digest_alg digest; 194 struct digest_alg digest;
170 struct compress_alg compress; 195 struct compress_alg compress;
@@ -201,6 +226,16 @@ static inline int crypto_alg_available(const char *name, u32 flags)
201 * crypto_free_*(), as well as the various helpers below. 226 * crypto_free_*(), as well as the various helpers below.
202 */ 227 */
203 228
/* Runtime ops for a blkcipher tfm, filled in by crypto_init_blkcipher_ops(). */
struct blkcipher_tfm {
	void *iv;	/* internal IV storage appended to the tfm context */
	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes);
	int (*decrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes);
};
238
204struct cipher_tfm { 239struct cipher_tfm {
205 void *cit_iv; 240 void *cit_iv;
206 unsigned int cit_ivsize; 241 unsigned int cit_ivsize;
@@ -251,6 +286,7 @@ struct compress_tfm {
251 u8 *dst, unsigned int *dlen); 286 u8 *dst, unsigned int *dlen);
252}; 287};
253 288
289#define crt_blkcipher crt_u.blkcipher
254#define crt_cipher crt_u.cipher 290#define crt_cipher crt_u.cipher
255#define crt_digest crt_u.digest 291#define crt_digest crt_u.digest
256#define crt_compress crt_u.compress 292#define crt_compress crt_u.compress
@@ -260,6 +296,7 @@ struct crypto_tfm {
260 u32 crt_flags; 296 u32 crt_flags;
261 297
262 union { 298 union {
299 struct blkcipher_tfm blkcipher;
263 struct cipher_tfm cipher; 300 struct cipher_tfm cipher;
264 struct digest_tfm digest; 301 struct digest_tfm digest;
265 struct compress_tfm compress; 302 struct compress_tfm compress;
@@ -272,6 +309,10 @@ struct crypto_tfm {
272 309
273#define crypto_cipher crypto_tfm 310#define crypto_cipher crypto_tfm
274 311
/* Block cipher handle: a typed wrapper around crypto_tfm. */
struct crypto_blkcipher {
	struct crypto_tfm base;
};
315
275enum { 316enum {
276 CRYPTOA_UNSPEC, 317 CRYPTOA_UNSPEC,
277 CRYPTOA_ALG, 318 CRYPTOA_ALG,
@@ -380,6 +421,144 @@ static inline unsigned int crypto_tfm_ctx_alignment(void)
380/* 421/*
381 * API wrappers. 422 * API wrappers.
382 */ 423 */
/* Unchecked cast from a generic tfm to a blkcipher handle. */
static inline struct crypto_blkcipher *__crypto_blkcipher_cast(
	struct crypto_tfm *tfm)
{
	return (struct crypto_blkcipher *)tfm;
}

/* Checked cast: BUGs if the tfm is not a blkcipher. */
static inline struct crypto_blkcipher *crypto_blkcipher_cast(
	struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_BLKCIPHER);
	return __crypto_blkcipher_cast(tfm);
}

/*
 * Allocate a blkcipher tfm by algorithm name.  The type is forced to
 * BLKCIPHER and the type bits added to the mask so only blkcipher
 * implementations can match.
 */
static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
	const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
}
446
/* Return the underlying generic tfm of a blkcipher handle. */
static inline struct crypto_tfm *crypto_blkcipher_tfm(
	struct crypto_blkcipher *tfm)
{
	return &tfm->base;
}

/* Release a blkcipher handle. */
static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
{
	crypto_free_tfm(crypto_blkcipher_tfm(tfm));
}

/* Return the algorithm name of a blkcipher handle. */
static inline const char *crypto_blkcipher_name(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_name(crypto_blkcipher_tfm(tfm));
}

/* Return the runtime ops (crt_blkcipher) of a blkcipher handle. */
static inline struct blkcipher_tfm *crypto_blkcipher_crt(
	struct crypto_blkcipher *tfm)
{
	return &crypto_blkcipher_tfm(tfm)->crt_blkcipher;
}

/* Return the blkcipher_alg union member of the handle's algorithm. */
static inline struct blkcipher_alg *crypto_blkcipher_alg(
	struct crypto_blkcipher *tfm)
{
	return &crypto_blkcipher_tfm(tfm)->__crt_alg->cra_blkcipher;
}
474
/* IV size in bytes of the handle's algorithm (0 when no IV is used). */
static inline unsigned int crypto_blkcipher_ivsize(struct crypto_blkcipher *tfm)
{
	return crypto_blkcipher_alg(tfm)->ivsize;
}

/* Block size in bytes of the handle's algorithm. */
static inline unsigned int crypto_blkcipher_blocksize(
	struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_blkcipher_tfm(tfm));
}

/* Alignment mask (required alignment - 1) of the handle's algorithm. */
static inline unsigned int crypto_blkcipher_alignmask(
	struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_blkcipher_tfm(tfm));
}

/* Read the tfm's flag word. */
static inline u32 crypto_blkcipher_get_flags(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_get_flags(crypto_blkcipher_tfm(tfm));
}

/* Set bits in the tfm's flag word. */
static inline void crypto_blkcipher_set_flags(struct crypto_blkcipher *tfm,
					      u32 flags)
{
	crypto_tfm_set_flags(crypto_blkcipher_tfm(tfm), flags);
}

/* Clear bits in the tfm's flag word. */
static inline void crypto_blkcipher_clear_flags(struct crypto_blkcipher *tfm,
						u32 flags)
{
	crypto_tfm_clear_flags(crypto_blkcipher_tfm(tfm), flags);
}
508
/*
 * Set the cipher key via the runtime setkey hook (by default the
 * length-checking setkey() installed in crypto/blkcipher.c).
 */
static inline int crypto_blkcipher_setkey(struct crypto_blkcipher *tfm,
					  const u8 *key, unsigned int keylen)
{
	return crypto_blkcipher_crt(tfm)->setkey(crypto_blkcipher_tfm(tfm),
						 key, keylen);
}
515
/* Encrypt using the tfm's internal IV state (crt->iv) as desc->info. */
static inline int crypto_blkcipher_encrypt(struct blkcipher_desc *desc,
					   struct scatterlist *dst,
					   struct scatterlist *src,
					   unsigned int nbytes)
{
	desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
	return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}

/* Encrypt using the caller-supplied IV already placed in desc->info. */
static inline int crypto_blkcipher_encrypt_iv(struct blkcipher_desc *desc,
					      struct scatterlist *dst,
					      struct scatterlist *src,
					      unsigned int nbytes)
{
	return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}

/* Decrypt using the tfm's internal IV state (crt->iv) as desc->info. */
static inline int crypto_blkcipher_decrypt(struct blkcipher_desc *desc,
					   struct scatterlist *dst,
					   struct scatterlist *src,
					   unsigned int nbytes)
{
	desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}

/* Decrypt using the caller-supplied IV already placed in desc->info. */
static inline int crypto_blkcipher_decrypt_iv(struct blkcipher_desc *desc,
					      struct scatterlist *dst,
					      struct scatterlist *src,
					      unsigned int nbytes)
{
	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}
549
/*
 * Copy @len bytes into the tfm's internal IV state.  No bounds check:
 * the caller must keep @len within crypto_blkcipher_ivsize().
 */
static inline void crypto_blkcipher_set_iv(struct crypto_blkcipher *tfm,
					   const u8 *src, unsigned int len)
{
	memcpy(crypto_blkcipher_crt(tfm)->iv, src, len);
}

/* Copy @len bytes of the tfm's internal IV state out to @dst. */
static inline void crypto_blkcipher_get_iv(struct crypto_blkcipher *tfm,
					   u8 *dst, unsigned int len)
{
	memcpy(dst, crypto_blkcipher_crt(tfm)->iv, len);
}
561
383static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm) 562static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
384{ 563{
385 return (struct crypto_cipher *)tfm; 564 return (struct crypto_cipher *)tfm;