author    Linus Torvalds <torvalds@linux-foundation.org>  2008-01-25 11:38:25 -0500
committer Linus Torvalds <torvalds@linux-foundation.org>  2008-01-25 11:38:25 -0500
commit    eba0e319c12fb098d66316a8eafbaaa9174a07c3 (patch)
tree      b2703117db9e36bb3510654efd55361f61c54742 /arch/s390
parent    df8dc74e8a383eaf2d9b44b80a71ec6f0e52b42e (diff)
parent    15e7b4452b72ae890f2fcb027b4c4fa63a1c9a7a (diff)
Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (125 commits)
  [CRYPTO] twofish: Merge common glue code
  [CRYPTO] hifn_795x: Fixup container_of() usage
  [CRYPTO] cast6: inline bloat--
  [CRYPTO] api: Set default CRYPTO_MINALIGN to unsigned long long
  [CRYPTO] tcrypt: Make xcbc available as a standalone test
  [CRYPTO] xcbc: Remove bogus hash/cipher test
  [CRYPTO] xcbc: Fix algorithm leak when block size check fails
  [CRYPTO] tcrypt: Zero axbuf in the right function
  [CRYPTO] padlock: Only reset the key once for each CBC and ECB operation
  [CRYPTO] api: Include sched.h for cond_resched in scatterwalk.h
  [CRYPTO] salsa20-asm: Remove unnecessary dependency on CRYPTO_SALSA20
  [CRYPTO] tcrypt: Add select of AEAD
  [CRYPTO] salsa20: Add x86-64 assembly version
  [CRYPTO] salsa20_i586: Salsa20 stream cipher algorithm (i586 version)
  [CRYPTO] gcm: Introduce rfc4106
  [CRYPTO] api: Show async type
  [CRYPTO] chainiv: Avoid lock spinning where possible
  [CRYPTO] seqiv: Add select AEAD in Kconfig
  [CRYPTO] scatterwalk: Handle zero nbytes in scatterwalk_map_and_copy
  [CRYPTO] null: Allow setkey on digest_null
  ...
Diffstat (limited to 'arch/s390')
-rw-r--r--   arch/s390/crypto/aes_s390.c   227
1 file changed, 200 insertions(+), 27 deletions(-)
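The aes_s390 change in this merge adds a software fallback for key sizes the CPACF hardware cannot handle (a z9 only accelerates 128-bit keys). The core trick, visible in fallback_init_cip()/fallback_init_blk() in the patch below, is to allocate a second "aes" implementation with CRYPTO_ALG_NEED_FALLBACK set in the mask but not in the type, so the crypto API only hands back an algorithm that does not itself need a fallback (i.e. never this driver). A condensed, hedged sketch of that dispatch pattern, simplified from the hunks below and not the literal kernel code:

/* Sketch only: error handling trimmed, names shortened. */
static int fallback_init(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	/* mask = NEED_FALLBACK, type = 0: only algorithms that do NOT
	 * need a fallback themselves (e.g. aes-generic) can match here. */
	sctx->fallback.cip = crypto_alloc_cipher(tfm->__crt_alg->cra_name, 0,
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
	return IS_ERR(sctx->fallback.cip) ? PTR_ERR(sctx->fallback.cip) : 0;
}

static void sketch_aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (need_fallback(sctx->key_len)) {	/* key size unsupported by HW */
		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
		return;
	}
	/* hardware path via crypt_s390_km(KM_AES_*_ENCRYPT, ...) */
}

The actual patch follows.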
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c
index 512669691ad0..46c97058ebe1 100644
--- a/arch/s390/crypto/aes_s390.c
+++ b/arch/s390/crypto/aes_s390.c
@@ -6,6 +6,7 @@
  * s390 Version:
  *   Copyright IBM Corp. 2005,2007
  *   Author(s): Jan Glauber (jang@de.ibm.com)
+ *		Sebastian Siewior (sebastian@breakpoint.cc> SW-Fallback
  *
  * Derived from "crypto/aes_generic.c"
  *
@@ -16,17 +17,13 @@
  *
  */
 
+#include <crypto/aes.h>
 #include <crypto/algapi.h>
+#include <linux/err.h>
 #include <linux/module.h>
 #include <linux/init.h>
 #include "crypt_s390.h"
 
-#define AES_MIN_KEY_SIZE	16
-#define AES_MAX_KEY_SIZE	32
-
-/* data block size for all key lengths */
-#define AES_BLOCK_SIZE	16
-
 #define AES_KEYLEN_128		1
 #define AES_KEYLEN_192		2
 #define AES_KEYLEN_256		4
@@ -39,45 +36,89 @@ struct s390_aes_ctx {
 	long enc;
 	long dec;
 	int key_len;
+	union {
+		struct crypto_blkcipher *blk;
+		struct crypto_cipher *cip;
+	} fallback;
 };
 
-static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
-		       unsigned int key_len)
+/*
+ * Check if the key_len is supported by the HW.
+ * Returns 0 if it is, a positive number if it is not and software fallback is
+ * required or a negative number in case the key size is not valid
+ */
+static int need_fallback(unsigned int key_len)
 {
-	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
-	u32 *flags = &tfm->crt_flags;
-
 	switch (key_len) {
 	case 16:
 		if (!(keylen_flag & AES_KEYLEN_128))
-			goto fail;
+			return 1;
 		break;
 	case 24:
 		if (!(keylen_flag & AES_KEYLEN_192))
-			goto fail;
-
+			return 1;
 		break;
 	case 32:
 		if (!(keylen_flag & AES_KEYLEN_256))
-			goto fail;
+			return 1;
 		break;
 	default:
-		goto fail;
+		return -1;
 		break;
 	}
+	return 0;
+}
+
+static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
+		unsigned int key_len)
+{
+	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+	int ret;
+
+	sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
+	sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
+			CRYPTO_TFM_REQ_MASK);
+
+	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
+	if (ret) {
+		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
+		tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
+				CRYPTO_TFM_RES_MASK);
+	}
+	return ret;
+}
+
+static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+		       unsigned int key_len)
+{
+	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+	u32 *flags = &tfm->crt_flags;
+	int ret;
+
+	ret = need_fallback(key_len);
+	if (ret < 0) {
+		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+		return -EINVAL;
+	}
 
 	sctx->key_len = key_len;
-	memcpy(sctx->key, in_key, key_len);
-	return 0;
-fail:
-	*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
-	return -EINVAL;
+	if (!ret) {
+		memcpy(sctx->key, in_key, key_len);
+		return 0;
+	}
+
+	return setkey_fallback_cip(tfm, in_key, key_len);
 }
 
 static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
 	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 
+	if (unlikely(need_fallback(sctx->key_len))) {
+		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
+		return;
+	}
+
 	switch (sctx->key_len) {
 	case 16:
 		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
@@ -98,6 +139,11 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
 	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 
+	if (unlikely(need_fallback(sctx->key_len))) {
+		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
+		return;
+	}
+
 	switch (sctx->key_len) {
 	case 16:
 		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
@@ -114,6 +160,29 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 	}
 }
 
+static int fallback_init_cip(struct crypto_tfm *tfm)
+{
+	const char *name = tfm->__crt_alg->cra_name;
+	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+
+	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
+			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+
+	if (IS_ERR(sctx->fallback.cip)) {
+		printk(KERN_ERR "Error allocating fallback algo %s\n", name);
+		return PTR_ERR(sctx->fallback.blk);
+	}
+
+	return 0;
+}
+
+static void fallback_exit_cip(struct crypto_tfm *tfm)
+{
+	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+
+	crypto_free_cipher(sctx->fallback.cip);
+	sctx->fallback.cip = NULL;
+}
 
 static struct crypto_alg aes_alg = {
 	.cra_name		=	"aes",
@@ -125,6 +194,8 @@ static struct crypto_alg aes_alg = {
 	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
 	.cra_module		=	THIS_MODULE,
 	.cra_list		=	LIST_HEAD_INIT(aes_alg.cra_list),
+	.cra_init		=	fallback_init_cip,
+	.cra_exit		=	fallback_exit_cip,
 	.cra_u			=	{
 		.cipher = {
 			.cia_min_keysize	=	AES_MIN_KEY_SIZE,
@@ -136,10 +207,70 @@ static struct crypto_alg aes_alg = {
 	}
 };
 
+static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
+		unsigned int len)
+{
+	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+	unsigned int ret;
+
+	sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
+	sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
+			CRYPTO_TFM_REQ_MASK);
+
+	ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
+	if (ret) {
+		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
+		tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
+				CRYPTO_TFM_RES_MASK);
+	}
+	return ret;
+}
+
+static int fallback_blk_dec(struct blkcipher_desc *desc,
+		struct scatterlist *dst, struct scatterlist *src,
+		unsigned int nbytes)
+{
+	unsigned int ret;
+	struct crypto_blkcipher *tfm;
+	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+
+	tfm = desc->tfm;
+	desc->tfm = sctx->fallback.blk;
+
+	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);
+
+	desc->tfm = tfm;
+	return ret;
+}
+
+static int fallback_blk_enc(struct blkcipher_desc *desc,
+		struct scatterlist *dst, struct scatterlist *src,
+		unsigned int nbytes)
+{
+	unsigned int ret;
+	struct crypto_blkcipher *tfm;
+	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+
+	tfm = desc->tfm;
+	desc->tfm = sctx->fallback.blk;
+
+	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
+
+	desc->tfm = tfm;
+	return ret;
+}
+
 static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
 			   unsigned int key_len)
 {
 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+	int ret;
+
+	ret = need_fallback(key_len);
+	if (ret > 0) {
+		sctx->key_len = key_len;
+		return setkey_fallback_blk(tfm, in_key, key_len);
+	}
 
 	switch (key_len) {
 	case 16:
@@ -188,6 +319,9 @@ static int ecb_aes_encrypt(struct blkcipher_desc *desc,
 	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
 	struct blkcipher_walk walk;
 
+	if (unlikely(need_fallback(sctx->key_len)))
+		return fallback_blk_enc(desc, dst, src, nbytes);
+
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
 }
@@ -199,10 +333,37 @@ static int ecb_aes_decrypt(struct blkcipher_desc *desc,
 	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
 	struct blkcipher_walk walk;
 
+	if (unlikely(need_fallback(sctx->key_len)))
+		return fallback_blk_dec(desc, dst, src, nbytes);
+
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
 }
 
+static int fallback_init_blk(struct crypto_tfm *tfm)
+{
+	const char *name = tfm->__crt_alg->cra_name;
+	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+
+	sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
+			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+
+	if (IS_ERR(sctx->fallback.blk)) {
+		printk(KERN_ERR "Error allocating fallback algo %s\n", name);
+		return PTR_ERR(sctx->fallback.blk);
+	}
+
+	return 0;
+}
+
+static void fallback_exit_blk(struct crypto_tfm *tfm)
+{
+	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+
+	crypto_free_blkcipher(sctx->fallback.blk);
+	sctx->fallback.blk = NULL;
+}
+
 static struct crypto_alg ecb_aes_alg = {
 	.cra_name		=	"ecb(aes)",
 	.cra_driver_name	=	"ecb-aes-s390",
@@ -214,6 +375,8 @@ static struct crypto_alg ecb_aes_alg = {
 	.cra_type		=	&crypto_blkcipher_type,
 	.cra_module		=	THIS_MODULE,
 	.cra_list		=	LIST_HEAD_INIT(ecb_aes_alg.cra_list),
+	.cra_init		=	fallback_init_blk,
+	.cra_exit		=	fallback_exit_blk,
 	.cra_u			=	{
 		.blkcipher = {
 			.min_keysize		=	AES_MIN_KEY_SIZE,
@@ -229,6 +392,13 @@ static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
 			   unsigned int key_len)
 {
 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+	int ret;
+
+	ret = need_fallback(key_len);
+	if (ret > 0) {
+		sctx->key_len = key_len;
+		return setkey_fallback_blk(tfm, in_key, key_len);
+	}
 
 	switch (key_len) {
 	case 16:
@@ -283,6 +453,9 @@ static int cbc_aes_encrypt(struct blkcipher_desc *desc,
 	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
 	struct blkcipher_walk walk;
 
+	if (unlikely(need_fallback(sctx->key_len)))
+		return fallback_blk_enc(desc, dst, src, nbytes);
+
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
 }
@@ -294,6 +467,9 @@ static int cbc_aes_decrypt(struct blkcipher_desc *desc,
 	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
 	struct blkcipher_walk walk;
 
+	if (unlikely(need_fallback(sctx->key_len)))
+		return fallback_blk_dec(desc, dst, src, nbytes);
+
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
 }
@@ -309,6 +485,8 @@ static struct crypto_alg cbc_aes_alg = {
 	.cra_type		=	&crypto_blkcipher_type,
 	.cra_module		=	THIS_MODULE,
 	.cra_list		=	LIST_HEAD_INIT(cbc_aes_alg.cra_list),
+	.cra_init		=	fallback_init_blk,
+	.cra_exit		=	fallback_exit_blk,
 	.cra_u			=	{
 		.blkcipher = {
 			.min_keysize		=	AES_MIN_KEY_SIZE,
@@ -336,14 +514,10 @@ static int __init aes_init(void)
 		return -EOPNOTSUPP;
 
 	/* z9 109 and z9 BC/EC only support 128 bit key length */
-	if (keylen_flag == AES_KEYLEN_128) {
-		aes_alg.cra_u.cipher.cia_max_keysize = AES_MIN_KEY_SIZE;
-		ecb_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
-		cbc_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
+	if (keylen_flag == AES_KEYLEN_128)
 		printk(KERN_INFO
 		       "aes_s390: hardware acceleration only available for"
 		       "128 bit keys\n");
-	}
 
 	ret = crypto_register_alg(&aes_alg);
 	if (ret)
@@ -382,4 +556,3 @@ MODULE_ALIAS("aes");
 
 MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
 MODULE_LICENSE("GPL");
-
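
From a consumer's point of view nothing changes: callers keep allocating plain "aes" (or "ecb(aes)"/"cbc(aes)"), and with this patch the driver routes 192/256-bit keys through the software fallback instead of hard-limiting the maximum key size at registration time. A rough usage sketch against the 2.6.24-era single-block cipher API; demo_aes() is a hypothetical caller, not part of the patch:

#include <crypto/aes.h>
#include <linux/crypto.h>
#include <linux/err.h>

static int demo_aes(void)
{
	struct crypto_cipher *tfm;
	u8 key[AES_MAX_KEY_SIZE] = { 0 };	/* 256-bit key, all zero for demo */
	u8 in[AES_BLOCK_SIZE] = { 0 }, out[AES_BLOCK_SIZE];
	int ret;

	tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* On a z9 this key length is not hardware-accelerated; after this
	 * patch setkey and encrypt are served by the fallback cipher. */
	ret = crypto_cipher_setkey(tfm, key, sizeof(key));
	if (!ret)
		crypto_cipher_encrypt_one(tfm, out, in);

	crypto_free_cipher(tfm);
	return ret;
}

Note that the fallback allocation in fallback_init_cip()/fallback_init_blk() still needs some other AES provider (typically aes-generic) to be present for the tfm to be created at all.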