author	Kazunori MIYAZAWA <miyazawa@linux-ipv6.org>	2006-10-27 23:15:24 -0400
committer	David S. Miller <davem@sunset.davemloft.net>	2006-12-06 21:38:49 -0500
commit	333b0d7eeacbd47159daf23757aa81368470c409 (patch)
tree	e05f1a3bed92833ab4d5f7c5dcc75ab8d8fcf245 /crypto
parent	45789328e5aa2de96d4467e4445418364e5378d7 (diff)
[CRYPTO] xcbc: New algorithm
This is the core code of XCBC.
XCBC is a construction that builds a MAC algorithm out of a block cipher
algorithm. For example, AES-XCBC-MAC is a MAC algorithm based on the AES
cipher algorithm.
Signed-off-by: Kazunori MIYAZAWA <miyazawa@linux-ipv6.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
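
The patch registers a crypto template named "xcbc", so the MAC is instantiated by name, e.g. "xcbc(aes)" for AES-XCBC-MAC. As a rough, hypothetical sketch (not part of this patch) of how a caller might drive it through the crypto_hash interface of this kernel generation; the function name, buffer handling and the 16-byte key length are illustrative only:

	/*
	 * Hypothetical caller sketch, not part of this patch: compute an
	 * AES-XCBC-MAC over a flat buffer with the crypto_hash interface.
	 */
	#include <linux/crypto.h>
	#include <linux/err.h>
	#include <linux/scatterlist.h>

	static int example_xcbc_mac(const u8 *key, unsigned int keylen,
				    void *data, unsigned int len, u8 *out)
	{
		struct crypto_hash *tfm;
		struct hash_desc desc;
		struct scatterlist sg;
		int err;

		/* Instantiate the template around AES: "xcbc(aes)". */
		tfm = crypto_alloc_hash("xcbc(aes)", 0, CRYPTO_ALG_ASYNC);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		/* XCBC requires keylen == cipher block size (16 for AES). */
		err = crypto_hash_setkey(tfm, key, keylen);
		if (err)
			goto out;

		desc.tfm = tfm;
		desc.flags = 0;
		sg_init_one(&sg, data, len);

		/* init + update + final in one call; writes a 16-byte MAC. */
		err = crypto_hash_digest(&desc, &sg, len, out);
	out:
		crypto_free_hash(tfm);
		return err;
	}

The intended in-kernel consumer is presumably IPsec, which the Kconfig help below points at via RFC 3566 and which allocates keyed hashes the same way it does for the hmac(*) templates.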
Diffstat (limited to 'crypto')
-rw-r--r--	crypto/Kconfig	11
-rw-r--r--	crypto/Makefile	1
-rw-r--r--	crypto/xcbc.c	346
3 files changed, 358 insertions, 0 deletions
diff --git a/crypto/Kconfig b/crypto/Kconfig
index cbae8392ce11..4495e46660bf 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -39,6 +39,17 @@ config CRYPTO_HMAC
 	  HMAC: Keyed-Hashing for Message Authentication (RFC2104).
 	  This is required for IPSec.
 
+config CRYPTO_XCBC
+	tristate "XCBC support"
+	depends on EXPERIMENTAL
+	select CRYPTO_HASH
+	select CRYPTO_MANAGER
+	help
+	  XCBC: Keyed-Hashing with encryption algorithm
+		http://www.ietf.org/rfc/rfc3566.txt
+		http://csrc.nist.gov/encryption/modes/proposedmodes/
+		xcbc-mac/xcbc-mac-spec.pdf
+
 config CRYPTO_NULL
 	tristate "Null algorithms"
 	select CRYPTO_ALGAPI
diff --git a/crypto/Makefile b/crypto/Makefile
index 72366208e291..aba9625fb429 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -15,6 +15,7 @@ obj-$(CONFIG_CRYPTO_HASH) += crypto_hash.o
 
 obj-$(CONFIG_CRYPTO_MANAGER) += cryptomgr.o
 obj-$(CONFIG_CRYPTO_HMAC) += hmac.o
+obj-$(CONFIG_CRYPTO_XCBC) += xcbc.o
 obj-$(CONFIG_CRYPTO_NULL) += crypto_null.o
 obj-$(CONFIG_CRYPTO_MD4) += md4.o
 obj-$(CONFIG_CRYPTO_MD5) += md5.o
diff --git a/crypto/xcbc.c b/crypto/xcbc.c
new file mode 100644
index 000000000000..f5929501bd48
--- /dev/null
+++ b/crypto/xcbc.c
@@ -0,0 +1,346 @@
+/*
+ * Copyright (C)2006 USAGI/WIDE Project
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+ *
+ * Author:
+ * 	Kazunori Miyazawa <miyazawa@linux-ipv6.org>
+ */
+
+#include <linux/crypto.h>
+#include <linux/err.h>
+#include <linux/kernel.h>
+#include <linux/mm.h>
+#include <linux/rtnetlink.h>
+#include <linux/slab.h>
+#include <linux/scatterlist.h>
+#include "internal.h"
+
+u_int32_t ks[12] = {0x01010101, 0x01010101, 0x01010101, 0x01010101,
+		    0x02020202, 0x02020202, 0x02020202, 0x02020202,
+		    0x03030303, 0x03030303, 0x03030303, 0x03030303};
+/*
+ * +------------------------
+ * | <parent tfm>
+ * +------------------------
+ * | crypto_xcbc_ctx
+ * +------------------------
+ * | odds (block size)
+ * +------------------------
+ * | prev (block size)
+ * +------------------------
+ * | key (block size)
+ * +------------------------
+ * | consts (block size * 3)
+ * +------------------------
+ */
+struct crypto_xcbc_ctx {
+	struct crypto_tfm *child;
+	u8 *odds;
+	u8 *prev;
+	u8 *key;
+	u8 *consts;
+	void (*xor)(u8 *a, const u8 *b, unsigned int bs);
+	unsigned int keylen;
+	unsigned int len;
+};
+
+static void xor_128(u8 *a, const u8 *b, unsigned int bs)
+{
+	((u32 *)a)[0] ^= ((u32 *)b)[0];
+	((u32 *)a)[1] ^= ((u32 *)b)[1];
+	((u32 *)a)[2] ^= ((u32 *)b)[2];
+	((u32 *)a)[3] ^= ((u32 *)b)[3];
+}
+
+static int _crypto_xcbc_digest_setkey(struct crypto_hash *parent,
+				      struct crypto_xcbc_ctx *ctx)
+{
+	int bs = crypto_hash_blocksize(parent);
+	int err = 0;
+	u8 key1[bs];
+
+	if ((err = crypto_cipher_setkey(ctx->child, ctx->key, ctx->keylen)))
+		return err;
+
+	ctx->child->__crt_alg->cra_cipher.cia_encrypt(ctx->child, key1,
+						      ctx->consts);
+
+	return crypto_cipher_setkey(ctx->child, key1, bs);
+}
+
+static int crypto_xcbc_digest_setkey(struct crypto_hash *parent,
+				     const u8 *inkey, unsigned int keylen)
+{
+	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);
+
+	if (keylen != crypto_tfm_alg_blocksize(ctx->child))
+		return -EINVAL;
+
+	ctx->keylen = keylen;
+	memcpy(ctx->key, inkey, keylen);
+	ctx->consts = (u8*)ks;
+
+	return _crypto_xcbc_digest_setkey(parent, ctx);
+}
+
+int crypto_xcbc_digest_init(struct hash_desc *pdesc)
+{
+	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(pdesc->tfm);
+	int bs = crypto_hash_blocksize(pdesc->tfm);
+
+	ctx->len = 0;
+	memset(ctx->odds, 0, bs);
+	memset(ctx->prev, 0, bs);
+
+	return 0;
+}
+
+int crypto_xcbc_digest_update(struct hash_desc *pdesc, struct scatterlist *sg, unsigned int nbytes)
+{
+	struct crypto_hash *parent = pdesc->tfm;
+	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);
+	struct crypto_tfm *tfm = ctx->child;
+	int bs = crypto_hash_blocksize(parent);
+	unsigned int i = 0;
+
+	do {
+
+		struct page *pg = sg[i].page;
+		unsigned int offset = sg[i].offset;
+		unsigned int slen = sg[i].length;
+
+		while (slen > 0) {
+			unsigned int len = min(slen, ((unsigned int)(PAGE_SIZE)) - offset);
+			char *p = crypto_kmap(pg, 0) + offset;
+
+			/* checking the data can fill the block */
+			if ((ctx->len + len) <= bs) {
+				memcpy(ctx->odds + ctx->len, p, len);
+				ctx->len += len;
+				slen -= len;
+
+				/* checking the rest of the page */
+				if (len + offset >= PAGE_SIZE) {
+					offset = 0;
+					pg++;
+				} else
+					offset += len;
+
+				crypto_kunmap(p, 0);
+				crypto_yield(tfm->crt_flags);
+				continue;
+			}
+
+			/* filling odds with new data and encrypting it */
+			memcpy(ctx->odds + ctx->len, p, bs - ctx->len);
+			len -= bs - ctx->len;
+			p += bs - ctx->len;
+
+			ctx->xor(ctx->prev, ctx->odds, bs);
+			tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, ctx->prev, ctx->prev);
+
+			/* clearing the length */
+			ctx->len = 0;
+
+			/* encrypting the rest of data */
+			while (len > bs) {
+				ctx->xor(ctx->prev, p, bs);
+				tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, ctx->prev, ctx->prev);
+				p += bs;
+				len -= bs;
+			}
+
+			/* keeping the surplus of blocksize */
+			if (len) {
+				memcpy(ctx->odds, p, len);
+				ctx->len = len;
+			}
+			crypto_kunmap(p, 0);
+			crypto_yield(tfm->crt_flags);
+			slen -= min(slen, ((unsigned int)(PAGE_SIZE)) - offset);
+			offset = 0;
+			pg++;
+		}
+		nbytes-=sg[i].length;
+		i++;
+	} while (nbytes>0);
+
+	return 0;
+}
+
+int crypto_xcbc_digest_final(struct hash_desc *pdesc, u8 *out)
+{
+	struct crypto_hash *parent = pdesc->tfm;
+	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(parent);
+	struct crypto_tfm *tfm = ctx->child;
+	int bs = crypto_hash_blocksize(parent);
+	int err = 0;
+
+	if (ctx->len == bs) {
+		u8 key2[bs];
+
+		if ((err = crypto_cipher_setkey(tfm, ctx->key, ctx->keylen)) != 0)
+			return err;
+
+		tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, key2, (const u8*)(ctx->consts+bs));
+
+		ctx->xor(ctx->prev, ctx->odds, bs);
+		ctx->xor(ctx->prev, key2, bs);
+		_crypto_xcbc_digest_setkey(parent, ctx);
+
+		tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, out, ctx->prev);
+	} else {
+		u8 key3[bs];
+		unsigned int rlen;
+		u8 *p = ctx->odds + ctx->len;
+		*p = 0x80;
+		p++;
+
+		rlen = bs - ctx->len -1;
+		if (rlen)
+			memset(p, 0, rlen);
+
+		if ((err = crypto_cipher_setkey(tfm, ctx->key, ctx->keylen)) != 0)
+			return err;
+
+		tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, key3, (const u8*)(ctx->consts+bs*2));
+
+		ctx->xor(ctx->prev, ctx->odds, bs);
+		ctx->xor(ctx->prev, key3, bs);
+
+		_crypto_xcbc_digest_setkey(parent, ctx);
+
+		tfm->__crt_alg->cra_cipher.cia_encrypt(tfm, out, ctx->prev);
+	}
+
+	return 0;
+}
+
+static int crypto_xcbc_digest(struct hash_desc *pdesc,
+		  struct scatterlist *sg, unsigned int nbytes, u8 *out)
+{
+	crypto_xcbc_digest_init(pdesc);
+	crypto_xcbc_digest_update(pdesc, sg, nbytes);
+	return crypto_xcbc_digest_final(pdesc, out);
+}
+
+static int xcbc_init_tfm(struct crypto_tfm *tfm)
+{
+	struct crypto_instance *inst = (void *)tfm->__crt_alg;
+	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
+	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(__crypto_hash_cast(tfm));
+	int bs = crypto_hash_blocksize(__crypto_hash_cast(tfm));
+
+	tfm = crypto_spawn_tfm(spawn);
+	if (IS_ERR(tfm))
+		return PTR_ERR(tfm);
+
+	switch(bs) {
+	case 16:
+		ctx->xor = xor_128;
+		break;
+	default:
+		return -EINVAL;
+	}
+
+	ctx->child = crypto_cipher_cast(tfm);
+	ctx->odds = (u8*)(ctx+1);
+	ctx->prev = ctx->odds + bs;
+	ctx->key = ctx->prev + bs;
+
+	return 0;
+};
+
+static void xcbc_exit_tfm(struct crypto_tfm *tfm)
+{
+	struct crypto_xcbc_ctx *ctx = crypto_hash_ctx_aligned(__crypto_hash_cast(tfm));
+	crypto_free_cipher(ctx->child);
+}
+
+static struct crypto_instance *xcbc_alloc(void *param, unsigned int len)
+{
+	struct crypto_instance *inst;
+	struct crypto_alg *alg;
+	alg = crypto_get_attr_alg(param, len, CRYPTO_ALG_TYPE_CIPHER,
+				  CRYPTO_ALG_TYPE_HASH_MASK | CRYPTO_ALG_ASYNC);
+	if (IS_ERR(alg))
+		return ERR_PTR(PTR_ERR(alg));
+
+	switch(alg->cra_blocksize) {
+	case 16:
+		break;
+	default:
+		return ERR_PTR(PTR_ERR(alg));
+	}
+
+	inst = crypto_alloc_instance("xcbc", alg);
+	if (IS_ERR(inst))
+		goto out_put_alg;
+
+	inst->alg.cra_flags = CRYPTO_ALG_TYPE_HASH;
+	inst->alg.cra_priority = alg->cra_priority;
+	inst->alg.cra_blocksize = alg->cra_blocksize;
+	inst->alg.cra_alignmask = alg->cra_alignmask;
+	inst->alg.cra_type = &crypto_hash_type;
+
+	inst->alg.cra_hash.digestsize =
+		(alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
+		CRYPTO_ALG_TYPE_HASH ? alg->cra_hash.digestsize :
+				       alg->cra_blocksize;
+	inst->alg.cra_ctxsize = sizeof(struct crypto_xcbc_ctx) +
+				ALIGN(inst->alg.cra_blocksize * 3, sizeof(void *));
+	inst->alg.cra_init = xcbc_init_tfm;
+	inst->alg.cra_exit = xcbc_exit_tfm;
+
+	inst->alg.cra_hash.init = crypto_xcbc_digest_init;
+	inst->alg.cra_hash.update = crypto_xcbc_digest_update;
+	inst->alg.cra_hash.final = crypto_xcbc_digest_final;
+	inst->alg.cra_hash.digest = crypto_xcbc_digest;
+	inst->alg.cra_hash.setkey = crypto_xcbc_digest_setkey;
+
+out_put_alg:
+	crypto_mod_put(alg);
+	return inst;
+}
+
+static void xcbc_free(struct crypto_instance *inst)
+{
+	crypto_drop_spawn(crypto_instance_ctx(inst));
+	kfree(inst);
+}
+
+static struct crypto_template crypto_xcbc_tmpl = {
+	.name = "xcbc",
+	.alloc = xcbc_alloc,
+	.free = xcbc_free,
+	.module = THIS_MODULE,
+};
+
+static int __init crypto_xcbc_module_init(void)
+{
+	return crypto_register_template(&crypto_xcbc_tmpl);
+}
+
+static void __exit crypto_xcbc_module_exit(void)
+{
+	crypto_unregister_template(&crypto_xcbc_tmpl);
+}
+
+module_init(crypto_xcbc_module_init);
+module_exit(crypto_xcbc_module_exit);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("XCBC keyed hash algorithm");
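
As a reading aid, not part of the commit itself: the three 16-byte rows of ks and the two branches of crypto_xcbc_digest_final follow the XCBC-MAC construction of RFC 3566 (cited in the Kconfig help). In the notation below, K is the key installed via setkey, E_X is one block-cipher encryption under key X, M_1..M_n are the 16-byte message blocks, and 0x01^{16} means the byte 0x01 repeated 16 times:

	% Sketch of the RFC 3566 XCBC-MAC construction implemented above.
	\begin{align*}
	  K_1 &= E_K(\mathtt{0x01}^{16}), \quad
	  K_2  = E_K(\mathtt{0x02}^{16}), \quad
	  K_3  = E_K(\mathtt{0x03}^{16}) \\
	  E_0 &= 0^{128}, \qquad
	  E_i  = E_{K_1}(M_i \oplus E_{i-1}) \quad (1 \le i \le n-1) \\
	  \mathrm{MAC} &=
	    \begin{cases}
	      E_{K_1}(M_n \oplus E_{n-1} \oplus K_2) & \text{if } M_n \text{ is a full block} \\
	      E_{K_1}(\mathrm{pad}(M_n) \oplus E_{n-1} \oplus K_3) & \text{otherwise,}
	    \end{cases}
	\end{align*}
	% where pad() appends a single 0x80 byte followed by zeros up to 16 bytes.

In the code, _crypto_xcbc_digest_setkey derives K_1 by encrypting the first row of ks under K and installs it in the child cipher; crypto_xcbc_digest_final temporarily re-installs K to derive K_2 or K_3 from the second or third row, XORs it into the last block, then restores K_1 for the final encryption.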