diff options
author | Herbert Xu <herbert@gondor.apana.org.au> | 2006-08-21 07:39:24 -0400 |
---|---|---|
committer | Herbert Xu <herbert@gondor.apana.org.au> | 2006-09-20 21:44:50 -0400 |
commit | a9e62fadf0b02ba4a1d945d1a75652507da94319 (patch) | |
tree | 8e17290e66a3b0200d1a55b1798c81c9bb83e19d /arch/s390/crypto/aes_s390.c | |
parent | 28ce728a90cce3a0c6c0ed00354299de52db94b1 (diff) |
[CRYPTO] s390: Added block cipher versions of CBC/ECB
This patch adds block cipher algorithms for S390. Once all users of the
old cipher type have been converted, the existing CBC/ECB non-block cipher
operations will be removed.
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'arch/s390/crypto/aes_s390.c')
-rw-r--r-- | arch/s390/crypto/aes_s390.c | 218 |
1 file changed, 215 insertions(+), 3 deletions(-)
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c index 220300e760d8..8f04b4e41b55 100644 --- a/arch/s390/crypto/aes_s390.c +++ b/arch/s390/crypto/aes_s390.c | |||
@@ -16,9 +16,9 @@ | |||
16 | * | 16 | * |
17 | */ | 17 | */ |
18 | 18 | ||
19 | #include <crypto/algapi.h> | ||
19 | #include <linux/module.h> | 20 | #include <linux/module.h> |
20 | #include <linux/init.h> | 21 | #include <linux/init.h> |
21 | #include <linux/crypto.h> | ||
22 | #include "crypt_s390.h" | 22 | #include "crypt_s390.h" |
23 | 23 | ||
24 | #define AES_MIN_KEY_SIZE 16 | 24 | #define AES_MIN_KEY_SIZE 16 |
@@ -34,6 +34,8 @@ int has_aes_256 = 0; | |||
/*
 * Per-tfm context shared by the plain cipher and the ECB/CBC block
 * cipher implementations below.
 */
struct s390_aes_ctx {
	u8 iv[AES_BLOCK_SIZE];		/* chaining value passed to KMC for CBC */
	u8 key[AES_MAX_KEY_SIZE];	/* raw key material; key_len bytes valid */
	long enc;			/* KM/KMC function code for encryption */
	long dec;			/* KM/KMC function code for decryption */
	int key_len;			/* key length in bytes (16, 24 or 32) */
};
39 | 41 | ||
@@ -244,6 +246,189 @@ static struct crypto_alg aes_alg = { | |||
244 | } | 246 | } |
245 | }; | 247 | }; |
246 | 248 | ||
249 | static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, | ||
250 | unsigned int key_len) | ||
251 | { | ||
252 | struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm); | ||
253 | |||
254 | switch (key_len) { | ||
255 | case 16: | ||
256 | sctx->enc = KM_AES_128_ENCRYPT; | ||
257 | sctx->dec = KM_AES_128_DECRYPT; | ||
258 | break; | ||
259 | case 24: | ||
260 | sctx->enc = KM_AES_192_ENCRYPT; | ||
261 | sctx->dec = KM_AES_192_DECRYPT; | ||
262 | break; | ||
263 | case 32: | ||
264 | sctx->enc = KM_AES_256_ENCRYPT; | ||
265 | sctx->dec = KM_AES_256_DECRYPT; | ||
266 | break; | ||
267 | } | ||
268 | |||
269 | return aes_set_key(tfm, in_key, key_len); | ||
270 | } | ||
271 | |||
272 | static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param, | ||
273 | struct blkcipher_walk *walk) | ||
274 | { | ||
275 | int ret = blkcipher_walk_virt(desc, walk); | ||
276 | unsigned int nbytes; | ||
277 | |||
278 | while ((nbytes = walk->nbytes)) { | ||
279 | /* only use complete blocks */ | ||
280 | unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1); | ||
281 | u8 *out = walk->dst.virt.addr; | ||
282 | u8 *in = walk->src.virt.addr; | ||
283 | |||
284 | ret = crypt_s390_km(func, param, out, in, n); | ||
285 | BUG_ON((ret < 0) || (ret != n)); | ||
286 | |||
287 | nbytes &= AES_BLOCK_SIZE - 1; | ||
288 | ret = blkcipher_walk_done(desc, walk, nbytes); | ||
289 | } | ||
290 | |||
291 | return ret; | ||
292 | } | ||
293 | |||
294 | static int ecb_aes_encrypt(struct blkcipher_desc *desc, | ||
295 | struct scatterlist *dst, struct scatterlist *src, | ||
296 | unsigned int nbytes) | ||
297 | { | ||
298 | struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm); | ||
299 | struct blkcipher_walk walk; | ||
300 | |||
301 | blkcipher_walk_init(&walk, dst, src, nbytes); | ||
302 | return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk); | ||
303 | } | ||
304 | |||
305 | static int ecb_aes_decrypt(struct blkcipher_desc *desc, | ||
306 | struct scatterlist *dst, struct scatterlist *src, | ||
307 | unsigned int nbytes) | ||
308 | { | ||
309 | struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm); | ||
310 | struct blkcipher_walk walk; | ||
311 | |||
312 | blkcipher_walk_init(&walk, dst, src, nbytes); | ||
313 | return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk); | ||
314 | } | ||
315 | |||
/* "ecb(aes)" block cipher backed by the s390 KM instruction. */
static struct crypto_alg ecb_aes_alg = {
	.cra_name		=	"ecb(aes)",
	.cra_driver_name	=	"ecb-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(ecb_aes_alg.cra_list),
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.setkey			=	ecb_aes_set_key,
			.encrypt		=	ecb_aes_encrypt,
			.decrypt		=	ecb_aes_decrypt,
		}
	}
};
336 | |||
337 | static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, | ||
338 | unsigned int key_len) | ||
339 | { | ||
340 | struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm); | ||
341 | |||
342 | switch (key_len) { | ||
343 | case 16: | ||
344 | sctx->enc = KMC_AES_128_ENCRYPT; | ||
345 | sctx->dec = KMC_AES_128_DECRYPT; | ||
346 | break; | ||
347 | case 24: | ||
348 | sctx->enc = KMC_AES_192_ENCRYPT; | ||
349 | sctx->dec = KMC_AES_192_DECRYPT; | ||
350 | break; | ||
351 | case 32: | ||
352 | sctx->enc = KMC_AES_256_ENCRYPT; | ||
353 | sctx->dec = KMC_AES_256_DECRYPT; | ||
354 | break; | ||
355 | } | ||
356 | |||
357 | return aes_set_key(tfm, in_key, key_len); | ||
358 | } | ||
359 | |||
/*
 * Walk the scatterlists and run the KMC instruction over every complete
 * AES block.  KMC chains through the IV held in the parameter block, so
 * the walk IV is copied into param before the loop and the updated
 * chaining value is copied back out after the last segment.
 */
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;

	/* nothing to do (or the initial walk failed): return early */
	if (!nbytes)
		goto out;

	memcpy(param, walk->iv, AES_BLOCK_SIZE);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		/* KMC advances the chaining value in param as it runs */
		ret = crypt_s390_kmc(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param, AES_BLOCK_SIZE);

out:
	return ret;
}
387 | |||
388 | static int cbc_aes_encrypt(struct blkcipher_desc *desc, | ||
389 | struct scatterlist *dst, struct scatterlist *src, | ||
390 | unsigned int nbytes) | ||
391 | { | ||
392 | struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm); | ||
393 | struct blkcipher_walk walk; | ||
394 | |||
395 | blkcipher_walk_init(&walk, dst, src, nbytes); | ||
396 | return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk); | ||
397 | } | ||
398 | |||
399 | static int cbc_aes_decrypt(struct blkcipher_desc *desc, | ||
400 | struct scatterlist *dst, struct scatterlist *src, | ||
401 | unsigned int nbytes) | ||
402 | { | ||
403 | struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm); | ||
404 | struct blkcipher_walk walk; | ||
405 | |||
406 | blkcipher_walk_init(&walk, dst, src, nbytes); | ||
407 | return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk); | ||
408 | } | ||
409 | |||
/* "cbc(aes)" block cipher backed by the s390 KMC instruction. */
static struct crypto_alg cbc_aes_alg = {
	.cra_name		=	"cbc(aes)",
	.cra_driver_name	=	"cbc-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(cbc_aes_alg.cra_list),
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	cbc_aes_set_key,
			.encrypt		=	cbc_aes_encrypt,
			.decrypt		=	cbc_aes_decrypt,
		}
	}
};
431 | |||
247 | static int __init aes_init(void) | 432 | static int __init aes_init(void) |
248 | { | 433 | { |
249 | int ret; | 434 | int ret; |
@@ -259,13 +444,40 @@ static int __init aes_init(void) | |||
259 | return -ENOSYS; | 444 | return -ENOSYS; |
260 | 445 | ||
261 | ret = crypto_register_alg(&aes_alg); | 446 | ret = crypto_register_alg(&aes_alg); |
262 | if (ret != 0) | 447 | if (ret != 0) { |
263 | printk(KERN_INFO "crypt_s390: aes_s390 couldn't be loaded.\n"); | 448 | printk(KERN_INFO "crypt_s390: aes-s390 couldn't be loaded.\n"); |
449 | goto aes_err; | ||
450 | } | ||
451 | |||
452 | ret = crypto_register_alg(&ecb_aes_alg); | ||
453 | if (ret != 0) { | ||
454 | printk(KERN_INFO | ||
455 | "crypt_s390: ecb-aes-s390 couldn't be loaded.\n"); | ||
456 | goto ecb_aes_err; | ||
457 | } | ||
458 | |||
459 | ret = crypto_register_alg(&cbc_aes_alg); | ||
460 | if (ret != 0) { | ||
461 | printk(KERN_INFO | ||
462 | "crypt_s390: cbc-aes-s390 couldn't be loaded.\n"); | ||
463 | goto cbc_aes_err; | ||
464 | } | ||
465 | |||
466 | out: | ||
264 | return ret; | 467 | return ret; |
468 | |||
469 | cbc_aes_err: | ||
470 | crypto_unregister_alg(&ecb_aes_alg); | ||
471 | ecb_aes_err: | ||
472 | crypto_unregister_alg(&aes_alg); | ||
473 | aes_err: | ||
474 | goto out; | ||
265 | } | 475 | } |
266 | 476 | ||
/* Module exit: unregister the algorithms in reverse registration order. */
static void __exit aes_fini(void)
{
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
}
271 | 483 | ||