Diffstat (limited to 'arch/s390/crypto/aes_s390.c')
 -rw-r--r--  arch/s390/crypto/aes_s390.c | 226
 1 file changed, 205 insertions(+), 21 deletions(-)
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c
index 812511bbb540..85246112ab5e 100644
--- a/arch/s390/crypto/aes_s390.c
+++ b/arch/s390/crypto/aes_s390.c
@@ -6,6 +6,7 @@
  * s390 Version:
  *   Copyright IBM Corp. 2005,2007
  *   Author(s): Jan Glauber (jang@de.ibm.com)
+ *		Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
  *
  * Derived from "crypto/aes_generic.c"
  *
@@ -18,6 +19,7 @@
 
 #include <crypto/aes.h>
 #include <crypto/algapi.h>
+#include <linux/err.h>
 #include <linux/module.h>
 #include <linux/init.h>
 #include "crypt_s390.h"
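Note: the new <linux/err.h> include is for the IS_ERR()/PTR_ERR() pair used by
the cra_init routines added further down. For reference, the generic kernel
error-pointer idiom they implement looks like this (illustrative sketch, not
taken from the patch; example_alloc() is hypothetical):

	/* Allocators that return a pointer encode a negative errno in the
	 * pointer value itself instead of returning NULL. */
	static int example_alloc(struct crypto_cipher **out)
	{
		struct crypto_cipher *tfm = crypto_alloc_cipher("aes", 0, 0);

		if (IS_ERR(tfm))		/* really -ENOENT, -ENOMEM, ... */
			return PTR_ERR(tfm);	/* recover the errno as an int */
		*out = tfm;
		return 0;
	}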
@@ -34,45 +36,89 @@ struct s390_aes_ctx {
 	long enc;
 	long dec;
 	int key_len;
+	union {
+		struct crypto_blkcipher *blk;
+		struct crypto_cipher *cip;
+	} fallback;
 };
 
-static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
-		       unsigned int key_len)
+/*
+ * Check if the key_len is supported by the HW.
+ * Returns 0 if it is, a positive number if it is not and the software
+ * fallback is required, or a negative number if the key size is not valid.
+ */
+static int need_fallback(unsigned int key_len)
 {
-	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
-	u32 *flags = &tfm->crt_flags;
-
 	switch (key_len) {
 	case 16:
 		if (!(keylen_flag & AES_KEYLEN_128))
-			goto fail;
+			return 1;
 		break;
 	case 24:
 		if (!(keylen_flag & AES_KEYLEN_192))
-			goto fail;
-
+			return 1;
 		break;
 	case 32:
 		if (!(keylen_flag & AES_KEYLEN_256))
-			goto fail;
+			return 1;
 		break;
 	default:
-		goto fail;
+		return -1;
 		break;
 	}
+	return 0;
+}
+
+static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
+		unsigned int key_len)
+{
+	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+	int ret;
+
+	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
+	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
+			CRYPTO_TFM_REQ_MASK);
+
+	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
+	if (ret) {
+		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
+		tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
+				CRYPTO_TFM_RES_MASK);
+	}
+	return ret;
+}
+
+static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
+		unsigned int key_len)
+{
+	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+	u32 *flags = &tfm->crt_flags;
+	int ret;
+
+	ret = need_fallback(key_len);
+	if (ret < 0) {
+		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+		return -EINVAL;
+	}
 
 	sctx->key_len = key_len;
-	memcpy(sctx->key, in_key, key_len);
-	return 0;
-fail:
-	*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
-	return -EINVAL;
+	if (!ret) {
+		memcpy(sctx->key, in_key, key_len);
+		return 0;
+	}
+
+	return setkey_fallback_cip(tfm, in_key, key_len);
 }
 
 static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
 	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 
+	if (unlikely(need_fallback(sctx->key_len))) {
+		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
+		return;
+	}
+
 	switch (sctx->key_len) {
 	case 16:
 		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
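Note: need_fallback() is deliberately tri-state, and every caller added by
this patch follows the same dispatch pattern, sketched below (illustrative;
dispatch_setkey() is hypothetical and mirrors aes_set_key() above). The
encrypt/decrypt fast paths may treat any non-zero result as "use the
fallback", because an invalid length can never reach them: aes_set_key() has
already rejected it with -EINVAL.

	/* Sketch of the dispatch contract around need_fallback(). */
	static int dispatch_setkey(struct crypto_tfm *tfm, const u8 *in_key,
				   unsigned int key_len)
	{
		struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
		int ret = need_fallback(key_len);

		if (ret < 0)			/* not 16, 24 or 32 bytes */
			return -EINVAL;
		sctx->key_len = key_len;	/* remembered for the fast paths */
		if (ret > 0)			/* valid size, no CPACF support */
			return setkey_fallback_cip(tfm, in_key, key_len);
		memcpy(sctx->key, in_key, key_len);	/* hardware path */
		return 0;
	}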
@@ -93,6 +139,11 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
 	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 
+	if (unlikely(need_fallback(sctx->key_len))) {
+		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
+		return;
+	}
+
 	switch (sctx->key_len) {
 	case 16:
 		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
@@ -109,6 +160,29 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 	}
 }
 
+static int fallback_init_cip(struct crypto_tfm *tfm)
+{
+	const char *name = tfm->__crt_alg->cra_name;
+	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+
+	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
+			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+
+	if (IS_ERR(sctx->fallback.cip)) {
+		printk(KERN_ERR "Error allocating fallback algo %s\n", name);
+		return PTR_ERR(sctx->fallback.cip);
+	}
+
+	return 0;
+}
+
+static void fallback_exit_cip(struct crypto_tfm *tfm)
+{
+	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+
+	crypto_free_cipher(sctx->fallback.cip);
+	sctx->fallback.cip = NULL;
+}
 
 static struct crypto_alg aes_alg = {
 	.cra_name = "aes",
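Note: the mask passed to crypto_alloc_cipher() is the important detail in
fallback_init_cip(). Putting CRYPTO_ALG_NEED_FALLBACK in the mask while
leaving it clear in the type asks the API for an "aes" implementation whose
own NEED_FALLBACK flag is zero, i.e. one that is self-sufficient (in practice
aes-generic), so this driver can never be handed back to itself; masking
CRYPTO_ALG_ASYNC likewise restricts the match to synchronous ciphers. The
complementary half of that convention, not visible in this rendering of the
patch, is that a driver relying on a fallback advertises the flag in its own
cra_flags (hypothetical declaration for illustration):

	static struct crypto_alg hw_alg_with_fallback = {
		.cra_name = "aes",
		/* excluded from lookups that mask CRYPTO_ALG_NEED_FALLBACK */
		.cra_flags = CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_NEED_FALLBACK,
		/* ... remaining fields as in the surrounding aes_alg ... */
	};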
@@ -120,6 +194,8 @@ static struct crypto_alg aes_alg = {
 	.cra_ctxsize = sizeof(struct s390_aes_ctx),
 	.cra_module = THIS_MODULE,
 	.cra_list = LIST_HEAD_INIT(aes_alg.cra_list),
+	.cra_init = fallback_init_cip,
+	.cra_exit = fallback_exit_cip,
 	.cra_u = {
 		.cipher = {
 			.cia_min_keysize = AES_MIN_KEY_SIZE,
@@ -131,10 +207,76 @@ static struct crypto_alg aes_alg = {
 	}
 };
 
+static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
+		unsigned int len)
+{
+	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+	int ret;
+
+	sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
+	sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
+			CRYPTO_TFM_REQ_MASK);
+
+	ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
+	if (ret) {
+		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
+		tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
+				CRYPTO_TFM_RES_MASK);
+	}
+	return ret;
+}
+
+static int fallback_blk_dec(struct blkcipher_desc *desc,
+		struct scatterlist *dst, struct scatterlist *src,
+		unsigned int nbytes)
+{
+	int ret;
+	struct crypto_blkcipher *tfm;
+	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+
+	memcpy(crypto_blkcipher_crt(sctx->fallback.blk)->iv, desc->info,
+		AES_BLOCK_SIZE);
+
+	tfm = desc->tfm;
+	desc->tfm = sctx->fallback.blk;
+
+	ret = crypto_blkcipher_decrypt(desc, dst, src, nbytes);
+
+	desc->tfm = tfm;
+	return ret;
+}
+
+static int fallback_blk_enc(struct blkcipher_desc *desc,
+		struct scatterlist *dst, struct scatterlist *src,
+		unsigned int nbytes)
+{
+	int ret;
+	struct crypto_blkcipher *tfm;
+	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
+
+	memcpy(crypto_blkcipher_crt(sctx->fallback.blk)->iv, desc->info,
+		AES_BLOCK_SIZE);
+
+	tfm = desc->tfm;
+	desc->tfm = sctx->fallback.blk;
+
+	ret = crypto_blkcipher_encrypt(desc, dst, src, nbytes);
+
+	desc->tfm = tfm;
+	return ret;
+}
+
 static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
 			   unsigned int key_len)
 {
 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+	int ret;
+
+	ret = need_fallback(key_len);
+	if (ret > 0) {
+		sctx->key_len = key_len;
+		return setkey_fallback_blk(tfm, in_key, key_len);
+	}
 
 	switch (key_len) {
 	case 16:
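Note: fallback_blk_enc() and fallback_blk_dec() reuse the caller's
blkcipher_desc by pointing desc->tfm at the fallback for the duration of a
single call and restoring it afterwards. The IV must be copied into the
fallback tfm first because, in the 2.6.24-era API, crypto_blkcipher_encrypt()
and _decrypt() reset desc->info to their own tfm's IV buffer, so the fallback
would otherwise start from a stale IV. The pattern in isolation (illustrative
sketch; run_on() is hypothetical):

	/* Borrow-the-descriptor pattern: run one request on another tfm. */
	static int run_on(struct blkcipher_desc *desc,
			  struct crypto_blkcipher *fb,
			  struct scatterlist *dst, struct scatterlist *src,
			  unsigned int nbytes)
	{
		struct crypto_blkcipher *orig = desc->tfm;
		int ret;

		/* hand the fallback the IV the caller has been using */
		memcpy(crypto_blkcipher_crt(fb)->iv, desc->info, AES_BLOCK_SIZE);

		desc->tfm = fb;
		ret = crypto_blkcipher_encrypt(desc, dst, src, nbytes);
		desc->tfm = orig;	/* always restore before returning */
		return ret;
	}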
@@ -183,6 +325,9 @@ static int ecb_aes_encrypt(struct blkcipher_desc *desc,
 	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
 	struct blkcipher_walk walk;
 
+	if (unlikely(need_fallback(sctx->key_len)))
+		return fallback_blk_enc(desc, dst, src, nbytes);
+
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
 }
@@ -194,10 +339,37 @@ static int ecb_aes_decrypt(struct blkcipher_desc *desc,
 	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
 	struct blkcipher_walk walk;
 
+	if (unlikely(need_fallback(sctx->key_len)))
+		return fallback_blk_dec(desc, dst, src, nbytes);
+
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
 }
 
+static int fallback_init_blk(struct crypto_tfm *tfm)
+{
+	const char *name = tfm->__crt_alg->cra_name;
+	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+
+	sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
+			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+
+	if (IS_ERR(sctx->fallback.blk)) {
+		printk(KERN_ERR "Error allocating fallback algo %s\n", name);
+		return PTR_ERR(sctx->fallback.blk);
+	}
+
+	return 0;
+}
+
+static void fallback_exit_blk(struct crypto_tfm *tfm)
+{
+	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+
+	crypto_free_blkcipher(sctx->fallback.blk);
+	sctx->fallback.blk = NULL;
+}
+
 static struct crypto_alg ecb_aes_alg = {
 	.cra_name = "ecb(aes)",
 	.cra_driver_name = "ecb-aes-s390",
@@ -209,6 +381,8 @@ static struct crypto_alg ecb_aes_alg = {
 	.cra_type = &crypto_blkcipher_type,
 	.cra_module = THIS_MODULE,
 	.cra_list = LIST_HEAD_INIT(ecb_aes_alg.cra_list),
+	.cra_init = fallback_init_blk,
+	.cra_exit = fallback_exit_blk,
 	.cra_u = {
 		.blkcipher = {
 			.min_keysize = AES_MIN_KEY_SIZE,
@@ -224,6 +398,13 @@ static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
 			   unsigned int key_len)
 {
 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
+	int ret;
+
+	ret = need_fallback(key_len);
+	if (ret > 0) {
+		sctx->key_len = key_len;
+		return setkey_fallback_blk(tfm, in_key, key_len);
+	}
 
 	switch (key_len) {
 	case 16:
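Note: the ordering inside ecb_aes_set_key() and cbc_aes_set_key() matters:
sctx->key_len is recorded before delegating, because the request-path entry
points re-derive the fallback decision from need_fallback(sctx->key_len) on
every call instead of caching a boolean at setkey time. Every such entry point
added by this patch reduces to the shape below (illustrative sketch; the
hypothetical enc_or_delegate() stands in for ecb/cbc_aes_encrypt/decrypt):

	static int enc_or_delegate(struct blkcipher_desc *desc,
				   struct scatterlist *dst,
				   struct scatterlist *src,
				   unsigned int nbytes)
	{
		struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

		/* key_len was set by setkey, even on the fallback path */
		if (unlikely(need_fallback(sctx->key_len)))
			return fallback_blk_enc(desc, dst, src, nbytes);

		/* ... otherwise run the CPACF walk as in ecb_aes_encrypt() ... */
		return 0;
	}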
@@ -278,6 +459,9 @@ static int cbc_aes_encrypt(struct blkcipher_desc *desc,
 	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
 	struct blkcipher_walk walk;
 
+	if (unlikely(need_fallback(sctx->key_len)))
+		return fallback_blk_enc(desc, dst, src, nbytes);
+
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
 }
@@ -289,6 +473,9 @@ static int cbc_aes_decrypt(struct blkcipher_desc *desc,
 	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
 	struct blkcipher_walk walk;
 
+	if (unlikely(need_fallback(sctx->key_len)))
+		return fallback_blk_dec(desc, dst, src, nbytes);
+
 	blkcipher_walk_init(&walk, dst, src, nbytes);
 	return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
 }
@@ -304,6 +491,8 @@ static struct crypto_alg cbc_aes_alg = {
 	.cra_type = &crypto_blkcipher_type,
 	.cra_module = THIS_MODULE,
 	.cra_list = LIST_HEAD_INIT(cbc_aes_alg.cra_list),
+	.cra_init = fallback_init_blk,
+	.cra_exit = fallback_exit_blk,
 	.cra_u = {
 		.blkcipher = {
 			.min_keysize = AES_MIN_KEY_SIZE,
@@ -331,14 +520,10 @@ static int __init aes_init(void)
 		return -EOPNOTSUPP;
 
 	/* z9 109 and z9 BC/EC only support 128 bit key length */
-	if (keylen_flag == AES_KEYLEN_128) {
-		aes_alg.cra_u.cipher.cia_max_keysize = AES_MIN_KEY_SIZE;
-		ecb_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
-		cbc_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
+	if (keylen_flag == AES_KEYLEN_128)
 		printk(KERN_INFO
 		       "aes_s390: hardware acceleration only available for"
 		       "128 bit keys\n");
-	}
 
 	ret = crypto_register_alg(&aes_alg);
 	if (ret)
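Note: this hunk is the user-visible payoff of the fallback. Before the patch,
a z9 (keylen_flag == AES_KEYLEN_128) clamped the advertised max_keysize of all
three algorithms down to 16 bytes, so a 192- or 256-bit setkey failed
outright; now the full 16/24/32-byte range stays advertised and the longer
keys are transparently serviced in software. A hypothetical in-kernel consumer
on z9 (illustrative sketch; assumes aes-generic is available as the fallback):

	/* 256-bit setkey on z9: rejected before this patch, works after. */
	static int demo_aes256_on_z9(void)
	{
		static const u8 key[32];	/* zero key, demo only */
		struct crypto_cipher *tfm;
		int ret;

		tfm = crypto_alloc_cipher("aes", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		ret = crypto_cipher_setkey(tfm, key, sizeof(key));
		/* 0 on success; the work is done by the fallback cipher */

		crypto_free_cipher(tfm);
		return ret;
	}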
@@ -377,4 +562,3 @@ MODULE_ALIAS("aes");
 
 MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
 MODULE_LICENSE("GPL");
-