author		Herbert Xu <herbert@gondor.apana.org.au>	2006-08-21 07:39:24 -0400
committer	Herbert Xu <herbert@gondor.apana.org.au>	2006-09-20 21:44:50 -0400
commit		a9e62fadf0b02ba4a1d945d1a75652507da94319 (patch)
tree		8e17290e66a3b0200d1a55b1798c81c9bb83e19d /arch
parent		28ce728a90cce3a0c6c0ed00354299de52db94b1 (diff)
[CRYPTO] s390: Added block cipher versions of CBC/ECB
This patch adds block cipher algorithms for S390. Once all users of the
old cipher type have been converted, the existing CBC/ECB non-block-cipher
operations will be removed.
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'arch')
-rw-r--r--	arch/s390/crypto/aes_s390.c	218
-rw-r--r--	arch/s390/crypto/crypt_s390.h	1
-rw-r--r--	arch/s390/crypto/des_s390.c	385
3 files changed, 590 insertions(+), 14 deletions(-)
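Note (not part of the patch): once registered, these algorithms are selected through the generic crypto API by name ("ecb(aes)", "cbc(aes)", "ecb(des)", and so on); because they register at CRYPT_S390_COMPOSITE_PRIORITY (400) they outrank the software ecb()/cbc() templates wrapped around a plain cipher. The helper below is only an illustrative sketch of how a kernel-side caller of the blkcipher interface of this kernel generation might end up on the new s390 implementations; the key/iv/buf arguments and the function name are hypothetical.

	/* Illustrative only: encrypt one buffer with cbc(aes); the s390
	 * driver is chosen automatically via its higher cra_priority. */
	#include <linux/crypto.h>
	#include <linux/err.h>
	#include <linux/scatterlist.h>

	static int example_cbc_aes(u8 *key, u8 *iv, u8 *buf, unsigned int len)
	{
		struct crypto_blkcipher *tfm;
		struct blkcipher_desc desc;
		struct scatterlist sg;
		int ret;

		tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		ret = crypto_blkcipher_setkey(tfm, key, 16);	/* AES-128 */
		if (ret)
			goto out;

		crypto_blkcipher_set_iv(tfm, iv, 16);	/* AES_BLOCK_SIZE */

		desc.tfm = tfm;
		desc.flags = 0;
		sg_init_one(&sg, buf, len);	/* len must be a multiple of 16 */

		ret = crypto_blkcipher_encrypt(&desc, &sg, &sg, len);
	out:
		crypto_free_blkcipher(tfm);
		return ret;
	}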
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c
index 220300e760d8..8f04b4e41b55 100644
--- a/arch/s390/crypto/aes_s390.c
+++ b/arch/s390/crypto/aes_s390.c
@@ -16,9 +16,9 @@
  *
  */
 
+#include <crypto/algapi.h>
 #include <linux/module.h>
 #include <linux/init.h>
-#include <linux/crypto.h>
 #include "crypt_s390.h"
 
 #define AES_MIN_KEY_SIZE	16
@@ -34,6 +34,8 @@ int has_aes_256 = 0;
 struct s390_aes_ctx {
 	u8 iv[AES_BLOCK_SIZE];
 	u8 key[AES_MAX_KEY_SIZE];
+	long enc;
+	long dec;
 	int key_len;
 };
 
@@ -244,6 +246,189 @@ static struct crypto_alg aes_alg = {
 	}
 };
 
+static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+			   unsigned int key_len)
+{
+	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+
+	switch (key_len) {
+	case 16:
+		sctx->enc = KM_AES_128_ENCRYPT;
+		sctx->dec = KM_AES_128_DECRYPT;
+		break;
+	case 24:
+		sctx->enc = KM_AES_192_ENCRYPT;
+		sctx->dec = KM_AES_192_DECRYPT;
+		break;
+	case 32:
+		sctx->enc = KM_AES_256_ENCRYPT;
+		sctx->dec = KM_AES_256_DECRYPT;
+		break;
+	}
+
+	return aes_set_key(tfm, in_key, key_len);
+}
+
+static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
+			 struct blkcipher_walk *walk)
+{
+	int ret = blkcipher_walk_virt(desc, walk);
+	unsigned int nbytes;
+
+	while ((nbytes = walk->nbytes)) {
+		/* only use complete blocks */
+		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
+		u8 *out = walk->dst.virt.addr;
+		u8 *in = walk->src.virt.addr;
+
+		ret = crypt_s390_km(func, param, out, in, n);
+		BUG_ON((ret < 0) || (ret != n));
+
+		nbytes &= AES_BLOCK_SIZE - 1;
+		ret = blkcipher_walk_done(desc, walk, nbytes);
+	}
+
+	return ret;
+}
+
+static int ecb_aes_encrypt(struct blkcipher_desc *desc,
+			   struct scatterlist *dst, struct scatterlist *src,
+			   unsigned int nbytes)
+{
+	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
+}
+
+static int ecb_aes_decrypt(struct blkcipher_desc *desc,
+			   struct scatterlist *dst, struct scatterlist *src,
+			   unsigned int nbytes)
+{
+	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
+}
+
+static struct crypto_alg ecb_aes_alg = {
+	.cra_name		= "ecb(aes)",
+	.cra_driver_name	= "ecb-aes-s390",
+	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_blocksize		= AES_BLOCK_SIZE,
+	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
+	.cra_type		= &crypto_blkcipher_type,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(ecb_aes_alg.cra_list),
+	.cra_u			= {
+		.blkcipher = {
+			.min_keysize	= AES_MIN_KEY_SIZE,
+			.max_keysize	= AES_MAX_KEY_SIZE,
+			.setkey		= ecb_aes_set_key,
+			.encrypt	= ecb_aes_encrypt,
+			.decrypt	= ecb_aes_decrypt,
+		}
+	}
+};
+
+static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+			   unsigned int key_len)
+{
+	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+
+	switch (key_len) {
+	case 16:
+		sctx->enc = KMC_AES_128_ENCRYPT;
+		sctx->dec = KMC_AES_128_DECRYPT;
+		break;
+	case 24:
+		sctx->enc = KMC_AES_192_ENCRYPT;
+		sctx->dec = KMC_AES_192_DECRYPT;
+		break;
+	case 32:
+		sctx->enc = KMC_AES_256_ENCRYPT;
+		sctx->dec = KMC_AES_256_DECRYPT;
+		break;
+	}
+
+	return aes_set_key(tfm, in_key, key_len);
+}
+
+static int cbc_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
+			 struct blkcipher_walk *walk)
+{
+	int ret = blkcipher_walk_virt(desc, walk);
+	unsigned int nbytes = walk->nbytes;
+
+	if (!nbytes)
+		goto out;
+
+	memcpy(param, walk->iv, AES_BLOCK_SIZE);
+	do {
+		/* only use complete blocks */
+		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
+		u8 *out = walk->dst.virt.addr;
+		u8 *in = walk->src.virt.addr;
+
+		ret = crypt_s390_kmc(func, param, out, in, n);
+		BUG_ON((ret < 0) || (ret != n));
+
+		nbytes &= AES_BLOCK_SIZE - 1;
+		ret = blkcipher_walk_done(desc, walk, nbytes);
+	} while ((nbytes = walk->nbytes));
+	memcpy(walk->iv, param, AES_BLOCK_SIZE);
+
+out:
+	return ret;
+}
+
+static int cbc_aes_encrypt(struct blkcipher_desc *desc,
+			   struct scatterlist *dst, struct scatterlist *src,
+			   unsigned int nbytes)
+{
+	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
+}
+
+static int cbc_aes_decrypt(struct blkcipher_desc *desc,
+			   struct scatterlist *dst, struct scatterlist *src,
+			   unsigned int nbytes)
+{
+	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
+}
+
+static struct crypto_alg cbc_aes_alg = {
+	.cra_name		= "cbc(aes)",
+	.cra_driver_name	= "cbc-aes-s390",
+	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_blocksize		= AES_BLOCK_SIZE,
+	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
+	.cra_type		= &crypto_blkcipher_type,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(cbc_aes_alg.cra_list),
+	.cra_u			= {
+		.blkcipher = {
+			.min_keysize	= AES_MIN_KEY_SIZE,
+			.max_keysize	= AES_MAX_KEY_SIZE,
+			.ivsize		= AES_BLOCK_SIZE,
+			.setkey		= cbc_aes_set_key,
+			.encrypt	= cbc_aes_encrypt,
+			.decrypt	= cbc_aes_decrypt,
+		}
+	}
+};
+
 static int __init aes_init(void)
 {
 	int ret;
@@ -259,13 +444,40 @@ static int __init aes_init(void)
 		return -ENOSYS;
 
 	ret = crypto_register_alg(&aes_alg);
-	if (ret != 0)
-		printk(KERN_INFO "crypt_s390: aes_s390 couldn't be loaded.\n");
+	if (ret != 0) {
+		printk(KERN_INFO "crypt_s390: aes-s390 couldn't be loaded.\n");
+		goto aes_err;
+	}
+
+	ret = crypto_register_alg(&ecb_aes_alg);
+	if (ret != 0) {
+		printk(KERN_INFO
+		       "crypt_s390: ecb-aes-s390 couldn't be loaded.\n");
+		goto ecb_aes_err;
+	}
+
+	ret = crypto_register_alg(&cbc_aes_alg);
+	if (ret != 0) {
+		printk(KERN_INFO
+		       "crypt_s390: cbc-aes-s390 couldn't be loaded.\n");
+		goto cbc_aes_err;
+	}
+
+out:
 	return ret;
+
+cbc_aes_err:
+	crypto_unregister_alg(&ecb_aes_alg);
+ecb_aes_err:
+	crypto_unregister_alg(&aes_alg);
+aes_err:
+	goto out;
 }
 
 static void __exit aes_fini(void)
 {
+	crypto_unregister_alg(&cbc_aes_alg);
+	crypto_unregister_alg(&ecb_aes_alg);
 	crypto_unregister_alg(&aes_alg);
 }
 
diff --git a/arch/s390/crypto/crypt_s390.h b/arch/s390/crypto/crypt_s390.h
index d1d330797f75..efd836c2e4a6 100644
--- a/arch/s390/crypto/crypt_s390.h
+++ b/arch/s390/crypto/crypt_s390.h
@@ -21,6 +21,7 @@
 #define CRYPT_S390_FUNC_MASK	0x00FF
 
 #define CRYPT_S390_PRIORITY 300
+#define CRYPT_S390_COMPOSITE_PRIORITY 400
 
 /* s930 cryptographic operations */
 enum crypt_s390_operations {
diff --git a/arch/s390/crypto/des_s390.c b/arch/s390/crypto/des_s390.c
index 3fd5d37d5e05..a6d2385ccb7a 100644
--- a/arch/s390/crypto/des_s390.c
+++ b/arch/s390/crypto/des_s390.c
@@ -13,9 +13,10 @@
  * (at your option) any later version.
  *
  */
+
+#include <crypto/algapi.h>
 #include <linux/init.h>
 #include <linux/module.h>
-#include <linux/crypto.h>
 
 #include "crypt_s390.h"
 #include "crypto_des.h"
@@ -157,6 +158,143 @@ static struct crypto_alg des_alg = {
 	}
 };
 
+static int ecb_desall_crypt(struct blkcipher_desc *desc, long func,
+			    void *param, struct blkcipher_walk *walk)
+{
+	int ret = blkcipher_walk_virt(desc, walk);
+	unsigned int nbytes;
+
+	while ((nbytes = walk->nbytes)) {
+		/* only use complete blocks */
+		unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);
+		u8 *out = walk->dst.virt.addr;
+		u8 *in = walk->src.virt.addr;
+
+		ret = crypt_s390_km(func, param, out, in, n);
+		BUG_ON((ret < 0) || (ret != n));
+
+		nbytes &= DES_BLOCK_SIZE - 1;
+		ret = blkcipher_walk_done(desc, walk, nbytes);
+	}
+
+	return ret;
+}
+
+static int cbc_desall_crypt(struct blkcipher_desc *desc, long func,
+			    void *param, struct blkcipher_walk *walk)
+{
+	int ret = blkcipher_walk_virt(desc, walk);
+	unsigned int nbytes = walk->nbytes;
+
+	if (!nbytes)
+		goto out;
+
+	memcpy(param, walk->iv, DES_BLOCK_SIZE);
+	do {
+		/* only use complete blocks */
+		unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);
+		u8 *out = walk->dst.virt.addr;
+		u8 *in = walk->src.virt.addr;
+
+		ret = crypt_s390_kmc(func, param, out, in, n);
+		BUG_ON((ret < 0) || (ret != n));
+
+		nbytes &= DES_BLOCK_SIZE - 1;
+		ret = blkcipher_walk_done(desc, walk, nbytes);
+	} while ((nbytes = walk->nbytes));
+	memcpy(walk->iv, param, DES_BLOCK_SIZE);
+
+out:
+	return ret;
+}
+
+static int ecb_des_encrypt(struct blkcipher_desc *desc,
+			   struct scatterlist *dst, struct scatterlist *src,
+			   unsigned int nbytes)
+{
+	struct crypt_s390_des_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return ecb_desall_crypt(desc, KM_DEA_ENCRYPT, sctx->key, &walk);
+}
+
+static int ecb_des_decrypt(struct blkcipher_desc *desc,
+			   struct scatterlist *dst, struct scatterlist *src,
+			   unsigned int nbytes)
+{
+	struct crypt_s390_des_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return ecb_desall_crypt(desc, KM_DEA_DECRYPT, sctx->key, &walk);
+}
+
+static struct crypto_alg ecb_des_alg = {
+	.cra_name		= "ecb(des)",
+	.cra_driver_name	= "ecb-des-s390",
+	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_blocksize		= DES_BLOCK_SIZE,
+	.cra_ctxsize		= sizeof(struct crypt_s390_des_ctx),
+	.cra_type		= &crypto_blkcipher_type,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(ecb_des_alg.cra_list),
+	.cra_u			= {
+		.blkcipher = {
+			.min_keysize	= DES_KEY_SIZE,
+			.max_keysize	= DES_KEY_SIZE,
+			.setkey		= des_setkey,
+			.encrypt	= ecb_des_encrypt,
+			.decrypt	= ecb_des_decrypt,
+		}
+	}
+};
+
+static int cbc_des_encrypt(struct blkcipher_desc *desc,
+			   struct scatterlist *dst, struct scatterlist *src,
+			   unsigned int nbytes)
+{
+	struct crypt_s390_des_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return cbc_desall_crypt(desc, KMC_DEA_ENCRYPT, sctx->iv, &walk);
+}
+
+static int cbc_des_decrypt(struct blkcipher_desc *desc,
+			   struct scatterlist *dst, struct scatterlist *src,
+			   unsigned int nbytes)
+{
+	struct crypt_s390_des_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return cbc_desall_crypt(desc, KMC_DEA_DECRYPT, sctx->iv, &walk);
+}
+
+static struct crypto_alg cbc_des_alg = {
+	.cra_name		= "cbc(des)",
+	.cra_driver_name	= "cbc-des-s390",
+	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_blocksize		= DES_BLOCK_SIZE,
+	.cra_ctxsize		= sizeof(struct crypt_s390_des_ctx),
+	.cra_type		= &crypto_blkcipher_type,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(cbc_des_alg.cra_list),
+	.cra_u			= {
+		.blkcipher = {
+			.min_keysize	= DES_KEY_SIZE,
+			.max_keysize	= DES_KEY_SIZE,
+			.ivsize		= DES_BLOCK_SIZE,
+			.setkey		= des_setkey,
+			.encrypt	= cbc_des_encrypt,
+			.decrypt	= cbc_des_decrypt,
+		}
+	}
+};
+
 /*
  * RFC2451:
  *
@@ -295,6 +433,95 @@ static struct crypto_alg des3_128_alg = {
 	}
 };
 
+static int ecb_des3_128_encrypt(struct blkcipher_desc *desc,
+				struct scatterlist *dst,
+				struct scatterlist *src, unsigned int nbytes)
+{
+	struct crypt_s390_des3_128_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return ecb_desall_crypt(desc, KM_TDEA_128_ENCRYPT, sctx->key, &walk);
+}
+
+static int ecb_des3_128_decrypt(struct blkcipher_desc *desc,
+				struct scatterlist *dst,
+				struct scatterlist *src, unsigned int nbytes)
+{
+	struct crypt_s390_des3_128_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return ecb_desall_crypt(desc, KM_TDEA_128_DECRYPT, sctx->key, &walk);
+}
+
+static struct crypto_alg ecb_des3_128_alg = {
+	.cra_name		= "ecb(des3_ede128)",
+	.cra_driver_name	= "ecb-des3_ede128-s390",
+	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_blocksize		= DES3_128_BLOCK_SIZE,
+	.cra_ctxsize		= sizeof(struct crypt_s390_des3_128_ctx),
+	.cra_type		= &crypto_blkcipher_type,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(
+					ecb_des3_128_alg.cra_list),
+	.cra_u			= {
+		.blkcipher = {
+			.min_keysize	= DES3_128_KEY_SIZE,
+			.max_keysize	= DES3_128_KEY_SIZE,
+			.setkey		= des3_128_setkey,
+			.encrypt	= ecb_des3_128_encrypt,
+			.decrypt	= ecb_des3_128_decrypt,
+		}
+	}
+};
+
+static int cbc_des3_128_encrypt(struct blkcipher_desc *desc,
+				struct scatterlist *dst,
+				struct scatterlist *src, unsigned int nbytes)
+{
+	struct crypt_s390_des3_128_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return cbc_desall_crypt(desc, KMC_TDEA_128_ENCRYPT, sctx->iv, &walk);
+}
+
+static int cbc_des3_128_decrypt(struct blkcipher_desc *desc,
+				struct scatterlist *dst,
+				struct scatterlist *src, unsigned int nbytes)
+{
+	struct crypt_s390_des3_128_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return cbc_desall_crypt(desc, KMC_TDEA_128_DECRYPT, sctx->iv, &walk);
+}
+
+static struct crypto_alg cbc_des3_128_alg = {
+	.cra_name		= "cbc(des3_ede128)",
+	.cra_driver_name	= "cbc-des3_ede128-s390",
+	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_blocksize		= DES3_128_BLOCK_SIZE,
+	.cra_ctxsize		= sizeof(struct crypt_s390_des3_128_ctx),
+	.cra_type		= &crypto_blkcipher_type,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(
+					cbc_des3_128_alg.cra_list),
+	.cra_u			= {
+		.blkcipher = {
+			.min_keysize	= DES3_128_KEY_SIZE,
+			.max_keysize	= DES3_128_KEY_SIZE,
+			.ivsize		= DES3_128_BLOCK_SIZE,
+			.setkey		= des3_128_setkey,
+			.encrypt	= cbc_des3_128_encrypt,
+			.decrypt	= cbc_des3_128_decrypt,
+		}
+	}
+};
+
 /*
  * RFC2451:
  *
@@ -437,6 +664,95 @@ static struct crypto_alg des3_192_alg = {
 	}
 };
 
+static int ecb_des3_192_encrypt(struct blkcipher_desc *desc,
+				struct scatterlist *dst,
+				struct scatterlist *src, unsigned int nbytes)
+{
+	struct crypt_s390_des3_192_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return ecb_desall_crypt(desc, KM_TDEA_192_ENCRYPT, sctx->key, &walk);
+}
+
+static int ecb_des3_192_decrypt(struct blkcipher_desc *desc,
+				struct scatterlist *dst,
+				struct scatterlist *src, unsigned int nbytes)
+{
+	struct crypt_s390_des3_192_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return ecb_desall_crypt(desc, KM_TDEA_192_DECRYPT, sctx->key, &walk);
+}
+
+static struct crypto_alg ecb_des3_192_alg = {
+	.cra_name		= "ecb(des3_ede)",
+	.cra_driver_name	= "ecb-des3_ede-s390",
+	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_blocksize		= DES3_192_BLOCK_SIZE,
+	.cra_ctxsize		= sizeof(struct crypt_s390_des3_192_ctx),
+	.cra_type		= &crypto_blkcipher_type,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(
+					ecb_des3_192_alg.cra_list),
+	.cra_u			= {
+		.blkcipher = {
+			.min_keysize	= DES3_192_KEY_SIZE,
+			.max_keysize	= DES3_192_KEY_SIZE,
+			.setkey		= des3_192_setkey,
+			.encrypt	= ecb_des3_192_encrypt,
+			.decrypt	= ecb_des3_192_decrypt,
+		}
+	}
+};
+
+static int cbc_des3_192_encrypt(struct blkcipher_desc *desc,
+				struct scatterlist *dst,
+				struct scatterlist *src, unsigned int nbytes)
+{
+	struct crypt_s390_des3_192_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return cbc_desall_crypt(desc, KMC_TDEA_192_ENCRYPT, sctx->iv, &walk);
+}
+
+static int cbc_des3_192_decrypt(struct blkcipher_desc *desc,
+				struct scatterlist *dst,
+				struct scatterlist *src, unsigned int nbytes)
+{
+	struct crypt_s390_des3_192_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+	struct blkcipher_walk walk;
+
+	blkcipher_walk_init(&walk, dst, src, nbytes);
+	return cbc_desall_crypt(desc, KMC_TDEA_192_DECRYPT, sctx->iv, &walk);
+}
+
+static struct crypto_alg cbc_des3_192_alg = {
+	.cra_name		= "cbc(des3_ede)",
+	.cra_driver_name	= "cbc-des3_ede-s390",
+	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_blocksize		= DES3_192_BLOCK_SIZE,
+	.cra_ctxsize		= sizeof(struct crypt_s390_des3_192_ctx),
+	.cra_type		= &crypto_blkcipher_type,
+	.cra_module		= THIS_MODULE,
+	.cra_list		= LIST_HEAD_INIT(
+					cbc_des3_192_alg.cra_list),
+	.cra_u			= {
+		.blkcipher = {
+			.min_keysize	= DES3_192_KEY_SIZE,
+			.max_keysize	= DES3_192_KEY_SIZE,
+			.ivsize		= DES3_192_BLOCK_SIZE,
+			.setkey		= des3_192_setkey,
+			.encrypt	= cbc_des3_192_encrypt,
+			.decrypt	= cbc_des3_192_decrypt,
+		}
+	}
+};
+
 static int init(void)
 {
 	int ret = 0;
@@ -446,22 +762,69 @@ static int init(void)
 	    !crypt_s390_func_available(KM_TDEA_192_ENCRYPT))
 		return -ENOSYS;
 
-	ret |= (crypto_register_alg(&des_alg) == 0) ? 0:1;
-	ret |= (crypto_register_alg(&des3_128_alg) == 0) ? 0:2;
-	ret |= (crypto_register_alg(&des3_192_alg) == 0) ? 0:4;
-	if (ret) {
-		crypto_unregister_alg(&des3_192_alg);
-		crypto_unregister_alg(&des3_128_alg);
-		crypto_unregister_alg(&des_alg);
-		return -EEXIST;
-	}
-	return 0;
+	ret = crypto_register_alg(&des_alg);
+	if (ret)
+		goto des_err;
+	ret = crypto_register_alg(&ecb_des_alg);
+	if (ret)
+		goto ecb_des_err;
+	ret = crypto_register_alg(&cbc_des_alg);
+	if (ret)
+		goto cbc_des_err;
+
+	ret = crypto_register_alg(&des3_128_alg);
+	if (ret)
+		goto des3_128_err;
+	ret = crypto_register_alg(&ecb_des3_128_alg);
+	if (ret)
+		goto ecb_des3_128_err;
+	ret = crypto_register_alg(&cbc_des3_128_alg);
+	if (ret)
+		goto cbc_des3_128_err;
+
+	ret = crypto_register_alg(&des3_192_alg);
+	if (ret)
+		goto des3_192_err;
+	ret = crypto_register_alg(&ecb_des3_192_alg);
+	if (ret)
+		goto ecb_des3_192_err;
+	ret = crypto_register_alg(&cbc_des3_192_alg);
+	if (ret)
+		goto cbc_des3_192_err;
+
+out:
+	return ret;
+
+cbc_des3_192_err:
+	crypto_unregister_alg(&ecb_des3_192_alg);
+ecb_des3_192_err:
+	crypto_unregister_alg(&des3_192_alg);
+des3_192_err:
+	crypto_unregister_alg(&cbc_des3_128_alg);
+cbc_des3_128_err:
+	crypto_unregister_alg(&ecb_des3_128_alg);
+ecb_des3_128_err:
+	crypto_unregister_alg(&des3_128_alg);
+des3_128_err:
+	crypto_unregister_alg(&cbc_des_alg);
+cbc_des_err:
+	crypto_unregister_alg(&ecb_des_alg);
+ecb_des_err:
+	crypto_unregister_alg(&des_alg);
+des_err:
+	goto out;
 }
 
 static void __exit fini(void)
 {
+	crypto_unregister_alg(&cbc_des3_192_alg);
+	crypto_unregister_alg(&ecb_des3_192_alg);
 	crypto_unregister_alg(&des3_192_alg);
+	crypto_unregister_alg(&cbc_des3_128_alg);
+	crypto_unregister_alg(&ecb_des3_128_alg);
 	crypto_unregister_alg(&des3_128_alg);
+	crypto_unregister_alg(&cbc_des_alg);
+	crypto_unregister_alg(&ecb_des_alg);
 	crypto_unregister_alg(&des_alg);
 }
 
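Aside (not part of the patch): the new aes_init() and init() paths above use the usual kernel goto-unwind idiom, where each successful registration gets a matching error label so a later failure unregisters everything already registered, in reverse order, before returning the error code. A standalone distillation of that pattern, with stub register/unregister functions invented purely for illustration:

	/* Illustrative sketch of the goto-unwind registration idiom;
	 * register_a/b/c and unregister_a/b stand in for crypto_register_alg()
	 * and crypto_unregister_alg(). Compiles as ordinary userspace C. */
	#include <stdio.h>

	static int register_a(void) { return 0; }
	static int register_b(void) { return 0; }
	static int register_c(void) { return -1; }	/* pretend the last step fails */
	static void unregister_a(void) { puts("unregister a"); }
	static void unregister_b(void) { puts("unregister b"); }

	static int init_example(void)
	{
		int ret;

		ret = register_a();
		if (ret)
			goto a_err;
		ret = register_b();
		if (ret)
			goto b_err;
		ret = register_c();
		if (ret)
			goto c_err;

	out:
		return ret;

	c_err:
		unregister_b();		/* unwind in reverse order of setup */
	b_err:
		unregister_a();
	a_err:
		goto out;
	}

	int main(void)
	{
		printf("init_example() = %d\n", init_example());
		return 0;
	}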