 crypto/keywrap.c | 419
 1 file changed, 419 insertions(+), 0 deletions(-)
diff --git a/crypto/keywrap.c b/crypto/keywrap.c
new file mode 100644
index 000000000000..b1d106ce55f3
--- /dev/null
+++ b/crypto/keywrap.c
@@ -0,0 +1,419 @@
/*
 * Key Wrapping: RFC3394 / NIST SP800-38F
 *
 * Copyright (C) 2015, Stephan Mueller <smueller@chronox.de>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, and the entire permission notice in its entirety,
 *    including the disclaimer of warranties.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote
 *    products derived from this software without specific prior
 *    written permission.
 *
 * ALTERNATIVELY, this product may be distributed under the terms of
 * the GNU General Public License, in which case the provisions of the GPL2
 * are required INSTEAD OF the above restrictions.  (This clause is
 * necessary due to a potential bad interaction between the GPL and
 * the restrictions contained in a BSD-style copyright.)
 *
 * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ALL OF
 * WHICH ARE HEREBY DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
 * OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
 * USE OF THIS SOFTWARE, EVEN IF NOT ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 */

/*
 * Note for using key wrapping:
 *
 *	* The result of the encryption operation is the ciphertext starting
 *	  with the 2nd semiblock. The first semiblock is provided as the IV.
 *	  The IV used to start the encryption operation is the default IV.
 *
 *	* The input for the decryption is the first semiblock handed in as an
 *	  IV. The ciphertext is the data starting with the 2nd semiblock. The
 *	  return code of the decryption operation is -EBADMSG in case an
 *	  integrity error is detected.
 *
 * To obtain the full result of an encryption as expected by SP800-38F, the
 * caller must allocate a buffer that is one semiblock (8 bytes) larger than
 * the plaintext:
 *
 *	unsigned int datalen = ptlen + crypto_skcipher_ivsize(tfm);
 *	u8 data[datalen];
 *	u8 *iv = data;
 *	u8 *pt = data + crypto_skcipher_ivsize(tfm);
 *		<ensure that pt contains the plaintext of size ptlen>
 *	sg_init_one(&sg, pt, ptlen);
 *	skcipher_request_set_crypt(req, &sg, &sg, ptlen, iv);
 *
 * ==> After encryption, data now contains the full KW result as per
 *     SP800-38F.
 *
 * For decryption, the ciphertext already has the expected length and must
 * be segmented appropriately:
 *
 *	unsigned int datalen = CTLEN;
 *	u8 data[datalen];
 *		<ensure that data contains the full ciphertext>
 *	u8 *iv = data;
 *	u8 *ct = data + crypto_skcipher_ivsize(tfm);
 *	unsigned int ctlen = datalen - crypto_skcipher_ivsize(tfm);
 *	sg_init_one(&sg, ct, ctlen);
 *	skcipher_request_set_crypt(req, &sg, &sg, ctlen, iv);
 *
 * ==> After decryption (which hopefully does not return -EBADMSG), the ct
 *     pointer now points to the plaintext of size ctlen.
 *
 * Note 2: KWP is not implemented as it would defeat the in-place operation.
 *	   To wrap data that is not aligned to the semiblock size, simply pad
 *	   the input with zeros up to the next 8 byte boundary.
 */
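
/*
 * A minimal caller sketch (untested, for illustration only) using the
 * legacy blkcipher interface registered by this template. The algorithm
 * name "kw(aes)" as well as key, keylen, iv, buf and buflen are
 * placeholders assumed for this sketch:
 *
 *	struct crypto_blkcipher *tfm = crypto_alloc_blkcipher("kw(aes)", 0, 0);
 *	struct blkcipher_desc desc = { .tfm = tfm, .info = iv, .flags = 0 };
 *	struct scatterlist sg;
 *
 *	crypto_blkcipher_setkey(tfm, key, keylen);
 *	sg_init_one(&sg, buf, buflen);
 *		<wrap buf in place; iv receives the first output semiblock>
 *	crypto_blkcipher_encrypt_iv(&desc, &sg, &sg, buflen);
 *		<unwrap: place the first semiblock into iv, the rest into buf>
 *	crypto_blkcipher_decrypt_iv(&desc, &sg, &sg, buflen);
 *	crypto_free_blkcipher(tfm);
 */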

#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/skcipher.h>

struct crypto_kw_ctx {
        struct crypto_cipher *child;
};

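/*
 * The cipher is always invoked on the 16 byte concatenation A || R: A holds
 * the chaining value (the IV respectively the integrity check value of
 * SP800-38F), R the data semiblock currently being processed. Keeping both
 * adjacent in one struct allows a single crypto_cipher_encrypt_one() /
 * crypto_cipher_decrypt_one() call to cover the full cipher block.
 */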
struct crypto_kw_block {
#define SEMIBSIZE 8
        u8 A[SEMIBSIZE];
        u8 R[SEMIBSIZE];
};

/* convert a 64 bit integer into its big-endian representation */
static inline void crypto_kw_cpu_to_be64(u64 val, u8 *buf)
{
        __be64 *a = (__be64 *)buf;

        *a = cpu_to_be64(val);
}

/*
 * Fast forward the SGL to the position "end" minus SEMIBSIZE, i.e. to the
 * start of the last semiblock within the first "end" bytes. The resulting
 * position in the SGL is returned in the walk variable.
 */
static void crypto_kw_scatterlist_ff(struct scatter_walk *walk,
                                     struct scatterlist *sg,
                                     unsigned int end)
{
        unsigned int skip = 0;

        /* The caller should only operate on full SEMIBLOCKs. */
        BUG_ON(end < SEMIBSIZE);

        skip = end - SEMIBSIZE;
        while (sg) {
                if (sg->length > skip) {
                        scatterwalk_start(walk, sg);
                        scatterwalk_advance(walk, skip);
                        break;
                } else
                        skip -= sg->length;

                sg = sg_next(sg);
        }
}

static int crypto_kw_decrypt(struct blkcipher_desc *desc,
                             struct scatterlist *dst, struct scatterlist *src,
                             unsigned int nbytes)
{
        struct crypto_blkcipher *tfm = desc->tfm;
        struct crypto_kw_ctx *ctx = crypto_blkcipher_ctx(tfm);
        struct crypto_cipher *child = ctx->child;

        unsigned long alignmask = max_t(unsigned long, SEMIBSIZE,
                                        crypto_cipher_alignmask(child));
        unsigned int i;

        u8 blockbuf[sizeof(struct crypto_kw_block) + alignmask];
        struct crypto_kw_block *block = (struct crypto_kw_block *)
                        PTR_ALIGN(blockbuf + 0, alignmask + 1);

        u64 t = 6 * ((nbytes) >> 3);
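        /*
         * For unwrapping, the counter t runs backwards from 6 * n down to 1
         * (n being the number of semiblocks), i.e. the inverse of the
         * wrapping operation below which counts upwards.
         */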
        struct scatterlist *lsrc, *ldst;
        int ret = 0;

        /*
         * Require at least 2 semiblocks (note: the 3rd semiblock that is
         * required by SP800-38F is the IV) and a multiple of the semiblock
         * size.
         */
        if (nbytes < (2 * SEMIBSIZE) || nbytes % SEMIBSIZE)
                return -EINVAL;

        /* Place the IV into block A */
        memcpy(block->A, desc->info, SEMIBSIZE);

        /*
         * src scatterlist is read-only. dst scatterlist is r/w. During the
         * first loop, lsrc points to src and ldst to dst. For any
         * subsequent round, the code operates on dst only.
         */
        lsrc = src;
        ldst = dst;

        for (i = 0; i < 6; i++) {
                u8 tbe_buffer[SEMIBSIZE + alignmask];
                /* alignment for the crypto_xor and the _to_be64 operation */
                u8 *tbe = PTR_ALIGN(tbe_buffer + 0, alignmask + 1);
                unsigned int tmp_nbytes = nbytes;
                struct scatter_walk src_walk, dst_walk;

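                /*
                 * Unwrapping processes the semiblocks in reverse order: each
                 * iteration of the while loop below first fast-forwards the
                 * SGL to the last semiblock not yet handled in this pass.
                 */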
                while (tmp_nbytes) {
                        /* fast-forward to the last unprocessed semiblock */
                        crypto_kw_scatterlist_ff(&src_walk, lsrc, tmp_nbytes);
                        /* get the source block */
                        scatterwalk_copychunks(block->R, &src_walk, SEMIBSIZE,
                                               false);

                        /* perform KW operation: get counter as byte string */
                        crypto_kw_cpu_to_be64(t, tbe);
                        /* perform KW operation: modify IV with counter */
                        crypto_xor(block->A, tbe, SEMIBSIZE);
                        t--;
                        /* perform KW operation: decrypt block */
                        crypto_cipher_decrypt_one(child, (u8 *)block,
                                                  (u8 *)block);
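                        /*
                         * As A and R are adjacent in struct crypto_kw_block,
                         * this single call decrypts the full 16 byte A || R
                         * block of the underlying cipher: block->A now holds
                         * the new chaining value and block->R the recovered
                         * data semiblock for this step.
                         */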

                        /* fast-forward to the matching destination position */
                        crypto_kw_scatterlist_ff(&dst_walk, ldst, tmp_nbytes);
                        /* Copy block->R into place */
                        scatterwalk_copychunks(block->R, &dst_walk, SEMIBSIZE,
                                               true);

                        tmp_nbytes -= SEMIBSIZE;
                }

                /* we now start to operate on the dst SGL only */
                lsrc = dst;
                ldst = dst;
        }

        /* Perform authentication check */
        if (crypto_memneq("\xA6\xA6\xA6\xA6\xA6\xA6\xA6\xA6", block->A,
                          SEMIBSIZE))
                ret = -EBADMSG;

        memzero_explicit(block, sizeof(struct crypto_kw_block));

        return ret;
}

static int crypto_kw_encrypt(struct blkcipher_desc *desc,
                             struct scatterlist *dst, struct scatterlist *src,
                             unsigned int nbytes)
{
        struct crypto_blkcipher *tfm = desc->tfm;
        struct crypto_kw_ctx *ctx = crypto_blkcipher_ctx(tfm);
        struct crypto_cipher *child = ctx->child;

        unsigned long alignmask = max_t(unsigned long, SEMIBSIZE,
                                        crypto_cipher_alignmask(child));
        unsigned int i;

        u8 blockbuf[sizeof(struct crypto_kw_block) + alignmask];
        struct crypto_kw_block *block = (struct crypto_kw_block *)
                        PTR_ALIGN(blockbuf + 0, alignmask + 1);

        u64 t = 1;
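        /*
         * For wrapping, the counter t starts at 1 and is incremented once
         * per processed semiblock, reaching 6 * n after the six passes
         * below.
         */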
        struct scatterlist *lsrc, *ldst;

        /*
         * Require at least 2 semiblocks (note: the 3rd semiblock that is
         * required by SP800-38F is the IV that occupies the first semiblock
         * of the output, i.e. the caller's complete SP800-38F result buffer
         * is one semiblock larger than src -- see the note at the top of
         * this file). Also ensure that the given data is a multiple of the
         * semiblock size.
         */
        if (nbytes < (2 * SEMIBSIZE) || nbytes % SEMIBSIZE)
                return -EINVAL;

        /*
         * Place the predefined IV into block A -- for encryption, the caller
         * does not need to provide an IV, but must fetch the final IV from
         * desc->info afterwards.
         */
        memcpy(block->A, "\xA6\xA6\xA6\xA6\xA6\xA6\xA6\xA6", SEMIBSIZE);

        /*
         * src scatterlist is read-only. dst scatterlist is r/w. During the
         * first loop, lsrc points to src and ldst to dst. For any
         * subsequent round, the code operates on dst only.
         */
        lsrc = src;
        ldst = dst;

        for (i = 0; i < 6; i++) {
                u8 tbe_buffer[SEMIBSIZE + alignmask];
                u8 *tbe = PTR_ALIGN(tbe_buffer + 0, alignmask + 1);
                unsigned int tmp_nbytes = nbytes;
                struct scatter_walk src_walk, dst_walk;

                scatterwalk_start(&src_walk, lsrc);
                scatterwalk_start(&dst_walk, ldst);
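                /*
                 * Wrapping processes the semiblocks in their natural order,
                 * so both walks simply start at the beginning of the SGLs
                 * for each of the six passes; no fast-forwarding as in the
                 * unwrap case is needed.
                 */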

                while (tmp_nbytes) {
                        /* get the source block */
                        scatterwalk_copychunks(block->R, &src_walk, SEMIBSIZE,
                                               false);

                        /* perform KW operation: encrypt block */
                        crypto_cipher_encrypt_one(child, (u8 *)block,
                                                  (u8 *)block);
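                        /*
                         * The single call above encrypts the contiguous
                         * 16 byte A || R pair, i.e. the E_K(A || R) step of
                         * SP800-38F; the counter is folded into the updated
                         * A value below.
                         */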
                        /* perform KW operation: get counter as byte string */
                        crypto_kw_cpu_to_be64(t, tbe);
                        /* perform KW operation: modify IV with counter */
                        crypto_xor(block->A, tbe, SEMIBSIZE);
                        t++;

                        /* Copy block->R into place */
                        scatterwalk_copychunks(block->R, &dst_walk, SEMIBSIZE,
                                               true);

                        tmp_nbytes -= SEMIBSIZE;
                }

                /* we now start to operate on the dst SGL only */
                lsrc = dst;
                ldst = dst;
        }

        /* establish the IV for the caller to pick up */
        memcpy(desc->info, block->A, SEMIBSIZE);

        memzero_explicit(block, sizeof(struct crypto_kw_block));

        return 0;
}

static int crypto_kw_setkey(struct crypto_tfm *parent, const u8 *key,
                            unsigned int keylen)
{
        struct crypto_kw_ctx *ctx = crypto_tfm_ctx(parent);
        struct crypto_cipher *child = ctx->child;
        int err;

        crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
                                       CRYPTO_TFM_REQ_MASK);
        err = crypto_cipher_setkey(child, key, keylen);
        crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
                                     CRYPTO_TFM_RES_MASK);
        return err;
}

static int crypto_kw_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
        struct crypto_spawn *spawn = crypto_instance_ctx(inst);
        struct crypto_kw_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_cipher *cipher;

        cipher = crypto_spawn_cipher(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        ctx->child = cipher;
        return 0;
}

static void crypto_kw_exit_tfm(struct crypto_tfm *tfm)
{
        struct crypto_kw_ctx *ctx = crypto_tfm_ctx(tfm);

        crypto_free_cipher(ctx->child);
}

static struct crypto_instance *crypto_kw_alloc(struct rtattr **tb)
{
        struct crypto_instance *inst = NULL;
        struct crypto_alg *alg = NULL;
        int err;

        err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
        if (err)
                return ERR_PTR(err);

        alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
                                  CRYPTO_ALG_TYPE_MASK);
        if (IS_ERR(alg))
                return ERR_CAST(alg);

        inst = ERR_PTR(-EINVAL);
        /*
         * Section 5.1 requirement for KW: the underlying cipher must operate
         * on 128 bit (i.e. two semiblock) blocks.
         */
        if (alg->cra_blocksize != sizeof(struct crypto_kw_block))
                goto err;

        inst = crypto_alloc_instance("kw", alg);
        if (IS_ERR(inst))
                goto err;

        inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
        inst->alg.cra_priority = alg->cra_priority;
        inst->alg.cra_blocksize = SEMIBSIZE;
        inst->alg.cra_alignmask = 0;
        inst->alg.cra_type = &crypto_blkcipher_type;
        inst->alg.cra_blkcipher.ivsize = SEMIBSIZE;
        inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
        inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;

        inst->alg.cra_ctxsize = sizeof(struct crypto_kw_ctx);

        inst->alg.cra_init = crypto_kw_init_tfm;
        inst->alg.cra_exit = crypto_kw_exit_tfm;

        inst->alg.cra_blkcipher.setkey = crypto_kw_setkey;
        inst->alg.cra_blkcipher.encrypt = crypto_kw_encrypt;
        inst->alg.cra_blkcipher.decrypt = crypto_kw_decrypt;

err:
        crypto_mod_put(alg);
        return inst;
}

static void crypto_kw_free(struct crypto_instance *inst)
{
        crypto_drop_spawn(crypto_instance_ctx(inst));
        kfree(inst);
}

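/*
 * Instantiating this template around a cipher with a 128 bit block size,
 * e.g. as "kw(aes)", yields the key wrapping mode of RFC3394 / SP800-38F
 * (KW-AE for encryption, KW-AD for decryption).
 */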
static struct crypto_template crypto_kw_tmpl = {
        .name = "kw",
        .alloc = crypto_kw_alloc,
        .free = crypto_kw_free,
        .module = THIS_MODULE,
};

static int __init crypto_kw_init(void)
{
        return crypto_register_template(&crypto_kw_tmpl);
}

static void __exit crypto_kw_exit(void)
{
        crypto_unregister_template(&crypto_kw_tmpl);
}

module_init(crypto_kw_init);
module_exit(crypto_kw_exit);

MODULE_LICENSE("Dual BSD/GPL");
MODULE_AUTHOR("Stephan Mueller <smueller@chronox.de>");
MODULE_DESCRIPTION("Key Wrapping (RFC3394 / NIST SP800-38F)");
MODULE_ALIAS_CRYPTO("kw");