aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorMartin Schwidefsky <schwidefsky@de.ibm.com>2016-11-04 06:57:15 -0400
committerMartin Schwidefsky <schwidefsky@de.ibm.com>2017-02-24 02:31:44 -0500
commit2793784307688a7a72bd322727a2cb11dede875f (patch)
tree009501aa3f2e1ea73a2188c8e34f7fe4dd9f523f
parent8693b9145b13dc44664602cd4cbe71862c26d0b5 (diff)
s390/crypt: Add protected key AES module
This patch introduces a new in-kernel-crypto blockcipher called 'paes' which implements AES with protected keys. The paes blockcipher can be used like the aes blockcipher, but uses secure key material to derive the working protected key and thus offers an encryption implementation in which a clear key value is never exposed in memory. The paes module is only available on the s390 platform and requires minimal CPACF hardware support, enabled with at least MSA level 3. These requirements are checked upon module initialization. Includes additional contribution from Harald Freudenberger. Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
-rw-r--r--arch/s390/crypto/Makefile2
-rw-r--r--arch/s390/crypto/paes_s390.c619
-rw-r--r--arch/s390/include/asm/cpacf.h14
-rw-r--r--drivers/crypto/Kconfig1
4 files changed, 634 insertions, 2 deletions
diff --git a/arch/s390/crypto/Makefile b/arch/s390/crypto/Makefile
index d1033de4c4ee..402c530c6da5 100644
--- a/arch/s390/crypto/Makefile
+++ b/arch/s390/crypto/Makefile
@@ -6,7 +6,7 @@ obj-$(CONFIG_CRYPTO_SHA1_S390) += sha1_s390.o sha_common.o
6obj-$(CONFIG_CRYPTO_SHA256_S390) += sha256_s390.o sha_common.o 6obj-$(CONFIG_CRYPTO_SHA256_S390) += sha256_s390.o sha_common.o
7obj-$(CONFIG_CRYPTO_SHA512_S390) += sha512_s390.o sha_common.o 7obj-$(CONFIG_CRYPTO_SHA512_S390) += sha512_s390.o sha_common.o
8obj-$(CONFIG_CRYPTO_DES_S390) += des_s390.o 8obj-$(CONFIG_CRYPTO_DES_S390) += des_s390.o
9obj-$(CONFIG_CRYPTO_AES_S390) += aes_s390.o 9obj-$(CONFIG_CRYPTO_AES_S390) += aes_s390.o paes_s390.o
10obj-$(CONFIG_S390_PRNG) += prng.o 10obj-$(CONFIG_S390_PRNG) += prng.o
11obj-$(CONFIG_CRYPTO_GHASH_S390) += ghash_s390.o 11obj-$(CONFIG_CRYPTO_GHASH_S390) += ghash_s390.o
12obj-$(CONFIG_CRYPTO_CRC32_S390) += crc32-vx_s390.o 12obj-$(CONFIG_CRYPTO_CRC32_S390) += crc32-vx_s390.o
diff --git a/arch/s390/crypto/paes_s390.c b/arch/s390/crypto/paes_s390.c
new file mode 100644
index 000000000000..d69ea495c4d7
--- /dev/null
+++ b/arch/s390/crypto/paes_s390.c
@@ -0,0 +1,619 @@
1/*
2 * Cryptographic API.
3 *
4 * s390 implementation of the AES Cipher Algorithm with protected keys.
5 *
6 * s390 Version:
7 * Copyright IBM Corp. 2017
8 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
9 * Harald Freudenberger <freude@de.ibm.com>
10 *
11 * This program is free software; you can redistribute it and/or modify
12 * it under the terms of the GNU General Public License (version 2 only)
13 * as published by the Free Software Foundation.
14 *
15 */
16
17#define KMSG_COMPONENT "paes_s390"
18#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt
19
20#include <crypto/aes.h>
21#include <crypto/algapi.h>
22#include <linux/bug.h>
23#include <linux/err.h>
24#include <linux/module.h>
25#include <linux/cpufeature.h>
26#include <linux/init.h>
27#include <linux/spinlock.h>
28#include <crypto/xts.h>
29#include <asm/cpacf.h>
30#include <asm/pkey.h>
31
32static u8 *ctrblk;
33static DEFINE_SPINLOCK(ctrblk_lock);
34
35static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;
36
37struct s390_paes_ctx {
38 struct pkey_seckey sk;
39 struct pkey_protkey pk;
40 unsigned long fc;
41};
42
43struct s390_pxts_ctx {
44 struct pkey_seckey sk[2];
45 struct pkey_protkey pk[2];
46 unsigned long fc;
47};
48
/*
 * Convert a secure key blob into its protected key form.
 * The conversion can fail transiently, so retry up to three
 * times before giving up.
 */
static inline int __paes_convert_key(struct pkey_seckey *sk,
				     struct pkey_protkey *pk)
{
	int tries = 3;
	int ret;

	do {
		ret = pkey_skey2pkey(sk, pk);
	} while (ret != 0 && --tries > 0);

	return ret;
}
63
64static int __paes_set_key(struct s390_paes_ctx *ctx)
65{
66 unsigned long fc;
67
68 if (__paes_convert_key(&ctx->sk, &ctx->pk))
69 return -EINVAL;
70
71 /* Pick the correct function code based on the protected key type */
72 fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PAES_128 :
73 (ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KM_PAES_192 :
74 (ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KM_PAES_256 : 0;
75
76 /* Check if the function code is available */
77 ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
78
79 return ctx->fc ? 0 : -EINVAL;
80}
81
82static int ecb_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
83 unsigned int key_len)
84{
85 struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
86
87 if (key_len != SECKEYBLOBSIZE)
88 return -EINVAL;
89
90 memcpy(ctx->sk.seckey, in_key, SECKEYBLOBSIZE);
91 if (__paes_set_key(ctx)) {
92 tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
93 return -EINVAL;
94 }
95 return 0;
96}
97
98static int ecb_paes_crypt(struct blkcipher_desc *desc,
99 unsigned long modifier,
100 struct blkcipher_walk *walk)
101{
102 struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
103 unsigned int nbytes, n, k;
104 int ret;
105
106 ret = blkcipher_walk_virt(desc, walk);
107 while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
108 /* only use complete blocks */
109 n = nbytes & ~(AES_BLOCK_SIZE - 1);
110 k = cpacf_km(ctx->fc | modifier, ctx->pk.protkey,
111 walk->dst.virt.addr, walk->src.virt.addr, n);
112 if (k)
113 ret = blkcipher_walk_done(desc, walk, nbytes - k);
114 if (k < n) {
115 if (__paes_set_key(ctx) != 0)
116 return blkcipher_walk_done(desc, walk, -EIO);
117 }
118 }
119 return ret;
120}
121
122static int ecb_paes_encrypt(struct blkcipher_desc *desc,
123 struct scatterlist *dst, struct scatterlist *src,
124 unsigned int nbytes)
125{
126 struct blkcipher_walk walk;
127
128 blkcipher_walk_init(&walk, dst, src, nbytes);
129 return ecb_paes_crypt(desc, CPACF_ENCRYPT, &walk);
130}
131
132static int ecb_paes_decrypt(struct blkcipher_desc *desc,
133 struct scatterlist *dst, struct scatterlist *src,
134 unsigned int nbytes)
135{
136 struct blkcipher_walk walk;
137
138 blkcipher_walk_init(&walk, dst, src, nbytes);
139 return ecb_paes_crypt(desc, CPACF_DECRYPT, &walk);
140}
141
142static struct crypto_alg ecb_paes_alg = {
143 .cra_name = "ecb(paes)",
144 .cra_driver_name = "ecb-paes-s390",
145 .cra_priority = 400, /* combo: aes + ecb */
146 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
147 .cra_blocksize = AES_BLOCK_SIZE,
148 .cra_ctxsize = sizeof(struct s390_paes_ctx),
149 .cra_type = &crypto_blkcipher_type,
150 .cra_module = THIS_MODULE,
151 .cra_list = LIST_HEAD_INIT(ecb_paes_alg.cra_list),
152 .cra_u = {
153 .blkcipher = {
154 .min_keysize = SECKEYBLOBSIZE,
155 .max_keysize = SECKEYBLOBSIZE,
156 .setkey = ecb_paes_set_key,
157 .encrypt = ecb_paes_encrypt,
158 .decrypt = ecb_paes_decrypt,
159 }
160 }
161};
162
163static int __cbc_paes_set_key(struct s390_paes_ctx *ctx)
164{
165 unsigned long fc;
166
167 if (__paes_convert_key(&ctx->sk, &ctx->pk))
168 return -EINVAL;
169
170 /* Pick the correct function code based on the protected key type */
171 fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMC_PAES_128 :
172 (ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMC_PAES_192 :
173 (ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KMC_PAES_256 : 0;
174
175 /* Check if the function code is available */
176 ctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;
177
178 return ctx->fc ? 0 : -EINVAL;
179}
180
181static int cbc_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
182 unsigned int key_len)
183{
184 struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
185
186 memcpy(ctx->sk.seckey, in_key, SECKEYBLOBSIZE);
187 if (__cbc_paes_set_key(ctx)) {
188 tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
189 return -EINVAL;
190 }
191 return 0;
192}
193
194static int cbc_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
195 struct blkcipher_walk *walk)
196{
197 struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
198 unsigned int nbytes, n, k;
199 int ret;
200 struct {
201 u8 iv[AES_BLOCK_SIZE];
202 u8 key[MAXPROTKEYSIZE];
203 } param;
204
205 ret = blkcipher_walk_virt(desc, walk);
206 memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
207 memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
208 while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
209 /* only use complete blocks */
210 n = nbytes & ~(AES_BLOCK_SIZE - 1);
211 k = cpacf_kmc(ctx->fc | modifier, &param,
212 walk->dst.virt.addr, walk->src.virt.addr, n);
213 if (k)
214 ret = blkcipher_walk_done(desc, walk, nbytes - k);
215 if (n < k) {
216 if (__cbc_paes_set_key(ctx) != 0)
217 return blkcipher_walk_done(desc, walk, -EIO);
218 memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
219 }
220 }
221 memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);
222 return ret;
223}
224
225static int cbc_paes_encrypt(struct blkcipher_desc *desc,
226 struct scatterlist *dst, struct scatterlist *src,
227 unsigned int nbytes)
228{
229 struct blkcipher_walk walk;
230
231 blkcipher_walk_init(&walk, dst, src, nbytes);
232 return cbc_paes_crypt(desc, 0, &walk);
233}
234
235static int cbc_paes_decrypt(struct blkcipher_desc *desc,
236 struct scatterlist *dst, struct scatterlist *src,
237 unsigned int nbytes)
238{
239 struct blkcipher_walk walk;
240
241 blkcipher_walk_init(&walk, dst, src, nbytes);
242 return cbc_paes_crypt(desc, CPACF_DECRYPT, &walk);
243}
244
245static struct crypto_alg cbc_paes_alg = {
246 .cra_name = "cbc(paes)",
247 .cra_driver_name = "cbc-paes-s390",
248 .cra_priority = 400, /* combo: aes + cbc */
249 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
250 .cra_blocksize = AES_BLOCK_SIZE,
251 .cra_ctxsize = sizeof(struct s390_paes_ctx),
252 .cra_type = &crypto_blkcipher_type,
253 .cra_module = THIS_MODULE,
254 .cra_list = LIST_HEAD_INIT(cbc_paes_alg.cra_list),
255 .cra_u = {
256 .blkcipher = {
257 .min_keysize = SECKEYBLOBSIZE,
258 .max_keysize = SECKEYBLOBSIZE,
259 .ivsize = AES_BLOCK_SIZE,
260 .setkey = cbc_paes_set_key,
261 .encrypt = cbc_paes_encrypt,
262 .decrypt = cbc_paes_decrypt,
263 }
264 }
265};
266
267static int __xts_paes_set_key(struct s390_pxts_ctx *ctx)
268{
269 unsigned long fc;
270
271 if (__paes_convert_key(&ctx->sk[0], &ctx->pk[0]) ||
272 __paes_convert_key(&ctx->sk[1], &ctx->pk[1]))
273 return -EINVAL;
274
275 if (ctx->pk[0].type != ctx->pk[1].type)
276 return -EINVAL;
277
278 /* Pick the correct function code based on the protected key type */
279 fc = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PXTS_128 :
280 (ctx->pk[0].type == PKEY_KEYTYPE_AES_256) ?
281 CPACF_KM_PXTS_256 : 0;
282
283 /* Check if the function code is available */
284 ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
285
286 return ctx->fc ? 0 : -EINVAL;
287}
288
289static int xts_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
290 unsigned int key_len)
291{
292 struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);
293 u8 ckey[2 * AES_MAX_KEY_SIZE];
294 unsigned int ckey_len;
295
296 memcpy(ctx->sk[0].seckey, in_key, SECKEYBLOBSIZE);
297 memcpy(ctx->sk[1].seckey, in_key + SECKEYBLOBSIZE, SECKEYBLOBSIZE);
298 if (__xts_paes_set_key(ctx)) {
299 tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
300 return -EINVAL;
301 }
302
303 /*
304 * xts_check_key verifies the key length is not odd and makes
305 * sure that the two keys are not the same. This can be done
306 * on the two protected keys as well
307 */
308 ckey_len = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ?
309 AES_KEYSIZE_128 : AES_KEYSIZE_256;
310 memcpy(ckey, ctx->pk[0].protkey, ckey_len);
311 memcpy(ckey + ckey_len, ctx->pk[1].protkey, ckey_len);
312 return xts_check_key(tfm, ckey, 2*ckey_len);
313}
314
315static int xts_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
316 struct blkcipher_walk *walk)
317{
318 struct s390_pxts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
319 unsigned int keylen, offset, nbytes, n, k;
320 int ret;
321 struct {
322 u8 key[MAXPROTKEYSIZE]; /* key + verification pattern */
323 u8 tweak[16];
324 u8 block[16];
325 u8 bit[16];
326 u8 xts[16];
327 } pcc_param;
328 struct {
329 u8 key[MAXPROTKEYSIZE]; /* key + verification pattern */
330 u8 init[16];
331 } xts_param;
332
333 ret = blkcipher_walk_virt(desc, walk);
334 keylen = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 48 : 64;
335 offset = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 16 : 0;
336retry:
337 memset(&pcc_param, 0, sizeof(pcc_param));
338 memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
339 memcpy(pcc_param.key + offset, ctx->pk[1].protkey, keylen);
340 cpacf_pcc(ctx->fc, pcc_param.key + offset);
341
342 memcpy(xts_param.key + offset, ctx->pk[0].protkey, keylen);
343 memcpy(xts_param.init, pcc_param.xts, 16);
344
345 while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
346 /* only use complete blocks */
347 n = nbytes & ~(AES_BLOCK_SIZE - 1);
348 k = cpacf_km(ctx->fc | modifier, xts_param.key + offset,
349 walk->dst.virt.addr, walk->src.virt.addr, n);
350 if (k)
351 ret = blkcipher_walk_done(desc, walk, nbytes - k);
352 if (k < n) {
353 if (__xts_paes_set_key(ctx) != 0)
354 return blkcipher_walk_done(desc, walk, -EIO);
355 goto retry;
356 }
357 }
358 return ret;
359}
360
361static int xts_paes_encrypt(struct blkcipher_desc *desc,
362 struct scatterlist *dst, struct scatterlist *src,
363 unsigned int nbytes)
364{
365 struct blkcipher_walk walk;
366
367 blkcipher_walk_init(&walk, dst, src, nbytes);
368 return xts_paes_crypt(desc, 0, &walk);
369}
370
371static int xts_paes_decrypt(struct blkcipher_desc *desc,
372 struct scatterlist *dst, struct scatterlist *src,
373 unsigned int nbytes)
374{
375 struct blkcipher_walk walk;
376
377 blkcipher_walk_init(&walk, dst, src, nbytes);
378 return xts_paes_crypt(desc, CPACF_DECRYPT, &walk);
379}
380
381static struct crypto_alg xts_paes_alg = {
382 .cra_name = "xts(paes)",
383 .cra_driver_name = "xts-paes-s390",
384 .cra_priority = 400, /* combo: aes + xts */
385 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
386 .cra_blocksize = AES_BLOCK_SIZE,
387 .cra_ctxsize = sizeof(struct s390_pxts_ctx),
388 .cra_type = &crypto_blkcipher_type,
389 .cra_module = THIS_MODULE,
390 .cra_list = LIST_HEAD_INIT(xts_paes_alg.cra_list),
391 .cra_u = {
392 .blkcipher = {
393 .min_keysize = 2 * SECKEYBLOBSIZE,
394 .max_keysize = 2 * SECKEYBLOBSIZE,
395 .ivsize = AES_BLOCK_SIZE,
396 .setkey = xts_paes_set_key,
397 .encrypt = xts_paes_encrypt,
398 .decrypt = xts_paes_decrypt,
399 }
400 }
401};
402
403static int __ctr_paes_set_key(struct s390_paes_ctx *ctx)
404{
405 unsigned long fc;
406
407 if (__paes_convert_key(&ctx->sk, &ctx->pk))
408 return -EINVAL;
409
410 /* Pick the correct function code based on the protected key type */
411 fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMCTR_PAES_128 :
412 (ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMCTR_PAES_192 :
413 (ctx->pk.type == PKEY_KEYTYPE_AES_256) ?
414 CPACF_KMCTR_PAES_256 : 0;
415
416 /* Check if the function code is available */
417 ctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;
418
419 return ctx->fc ? 0 : -EINVAL;
420}
421
422static int ctr_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
423 unsigned int key_len)
424{
425 struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
426
427 memcpy(ctx->sk.seckey, in_key, key_len);
428 if (__ctr_paes_set_key(ctx)) {
429 tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
430 return -EINVAL;
431 }
432 return 0;
433}
434
435static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
436{
437 unsigned int i, n;
438
439 /* only use complete blocks, max. PAGE_SIZE */
440 memcpy(ctrptr, iv, AES_BLOCK_SIZE);
441 n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
442 for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) {
443 memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
444 crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
445 ctrptr += AES_BLOCK_SIZE;
446 }
447 return n;
448}
449
450static int ctr_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
451 struct blkcipher_walk *walk)
452{
453 struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
454 u8 buf[AES_BLOCK_SIZE], *ctrptr;
455 unsigned int nbytes, n, k;
456 int ret, locked;
457
458 locked = spin_trylock(&ctrblk_lock);
459
460 ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
461 while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
462 n = AES_BLOCK_SIZE;
463 if (nbytes >= 2*AES_BLOCK_SIZE && locked)
464 n = __ctrblk_init(ctrblk, walk->iv, nbytes);
465 ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk->iv;
466 k = cpacf_kmctr(ctx->fc | modifier, ctx->pk.protkey,
467 walk->dst.virt.addr, walk->src.virt.addr,
468 n, ctrptr);
469 if (k) {
470 if (ctrptr == ctrblk)
471 memcpy(walk->iv, ctrptr + k - AES_BLOCK_SIZE,
472 AES_BLOCK_SIZE);
473 crypto_inc(walk->iv, AES_BLOCK_SIZE);
474 ret = blkcipher_walk_done(desc, walk, nbytes - n);
475 }
476 if (k < n) {
477 if (__ctr_paes_set_key(ctx) != 0)
478 return blkcipher_walk_done(desc, walk, -EIO);
479 }
480 }
481 if (locked)
482 spin_unlock(&ctrblk_lock);
483 /*
484 * final block may be < AES_BLOCK_SIZE, copy only nbytes
485 */
486 if (nbytes) {
487 while (1) {
488 if (cpacf_kmctr(ctx->fc | modifier,
489 ctx->pk.protkey, buf,
490 walk->src.virt.addr, AES_BLOCK_SIZE,
491 walk->iv) == AES_BLOCK_SIZE)
492 break;
493 if (__ctr_paes_set_key(ctx) != 0)
494 return blkcipher_walk_done(desc, walk, -EIO);
495 }
496 memcpy(walk->dst.virt.addr, buf, nbytes);
497 crypto_inc(walk->iv, AES_BLOCK_SIZE);
498 ret = blkcipher_walk_done(desc, walk, 0);
499 }
500
501 return ret;
502}
503
504static int ctr_paes_encrypt(struct blkcipher_desc *desc,
505 struct scatterlist *dst, struct scatterlist *src,
506 unsigned int nbytes)
507{
508 struct blkcipher_walk walk;
509
510 blkcipher_walk_init(&walk, dst, src, nbytes);
511 return ctr_paes_crypt(desc, 0, &walk);
512}
513
514static int ctr_paes_decrypt(struct blkcipher_desc *desc,
515 struct scatterlist *dst, struct scatterlist *src,
516 unsigned int nbytes)
517{
518 struct blkcipher_walk walk;
519
520 blkcipher_walk_init(&walk, dst, src, nbytes);
521 return ctr_paes_crypt(desc, CPACF_DECRYPT, &walk);
522}
523
524static struct crypto_alg ctr_paes_alg = {
525 .cra_name = "ctr(paes)",
526 .cra_driver_name = "ctr-paes-s390",
527 .cra_priority = 400, /* combo: aes + ctr */
528 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
529 .cra_blocksize = 1,
530 .cra_ctxsize = sizeof(struct s390_paes_ctx),
531 .cra_type = &crypto_blkcipher_type,
532 .cra_module = THIS_MODULE,
533 .cra_list = LIST_HEAD_INIT(ctr_paes_alg.cra_list),
534 .cra_u = {
535 .blkcipher = {
536 .min_keysize = SECKEYBLOBSIZE,
537 .max_keysize = SECKEYBLOBSIZE,
538 .ivsize = AES_BLOCK_SIZE,
539 .setkey = ctr_paes_set_key,
540 .encrypt = ctr_paes_encrypt,
541 .decrypt = ctr_paes_decrypt,
542 }
543 }
544};
545
546static inline void __crypto_unregister_alg(struct crypto_alg *alg)
547{
548 if (!list_empty(&alg->cra_list))
549 crypto_unregister_alg(alg);
550}
551
552static void paes_s390_fini(void)
553{
554 if (ctrblk)
555 free_page((unsigned long) ctrblk);
556 __crypto_unregister_alg(&ctr_paes_alg);
557 __crypto_unregister_alg(&xts_paes_alg);
558 __crypto_unregister_alg(&cbc_paes_alg);
559 __crypto_unregister_alg(&ecb_paes_alg);
560}
561
562static int __init paes_s390_init(void)
563{
564 int ret;
565
566 /* Query available functions for KM, KMC and KMCTR */
567 cpacf_query(CPACF_KM, &km_functions);
568 cpacf_query(CPACF_KMC, &kmc_functions);
569 cpacf_query(CPACF_KMCTR, &kmctr_functions);
570
571 if (cpacf_test_func(&km_functions, CPACF_KM_PAES_128) ||
572 cpacf_test_func(&km_functions, CPACF_KM_PAES_192) ||
573 cpacf_test_func(&km_functions, CPACF_KM_PAES_256)) {
574 ret = crypto_register_alg(&ecb_paes_alg);
575 if (ret)
576 goto out_err;
577 }
578
579 if (cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_128) ||
580 cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_192) ||
581 cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_256)) {
582 ret = crypto_register_alg(&cbc_paes_alg);
583 if (ret)
584 goto out_err;
585 }
586
587 if (cpacf_test_func(&km_functions, CPACF_KM_PXTS_128) ||
588 cpacf_test_func(&km_functions, CPACF_KM_PXTS_256)) {
589 ret = crypto_register_alg(&xts_paes_alg);
590 if (ret)
591 goto out_err;
592 }
593
594 if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_128) ||
595 cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_192) ||
596 cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_256)) {
597 ret = crypto_register_alg(&ctr_paes_alg);
598 if (ret)
599 goto out_err;
600 ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
601 if (!ctrblk) {
602 ret = -ENOMEM;
603 goto out_err;
604 }
605 }
606
607 return 0;
608out_err:
609 paes_s390_fini();
610 return ret;
611}
612
613module_init(paes_s390_init);
614module_exit(paes_s390_fini);
615
616MODULE_ALIAS_CRYPTO("aes-all");
617
618MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm with protected keys");
619MODULE_LICENSE("GPL");
diff --git a/arch/s390/include/asm/cpacf.h b/arch/s390/include/asm/cpacf.h
index 992a20868110..e2dfbf280d12 100644
--- a/arch/s390/include/asm/cpacf.h
+++ b/arch/s390/include/asm/cpacf.h
@@ -28,8 +28,9 @@
28#define CPACF_PPNO 0xb93c /* MSA5 */ 28#define CPACF_PPNO 0xb93c /* MSA5 */
29 29
30/* 30/*
31 * Decryption modifier bit 31 * En/decryption modifier bits
32 */ 32 */
33#define CPACF_ENCRYPT 0x00
33#define CPACF_DECRYPT 0x80 34#define CPACF_DECRYPT 0x80
34 35
35/* 36/*
@@ -42,8 +43,13 @@
42#define CPACF_KM_AES_128 0x12 43#define CPACF_KM_AES_128 0x12
43#define CPACF_KM_AES_192 0x13 44#define CPACF_KM_AES_192 0x13
44#define CPACF_KM_AES_256 0x14 45#define CPACF_KM_AES_256 0x14
46#define CPACF_KM_PAES_128 0x1a
47#define CPACF_KM_PAES_192 0x1b
48#define CPACF_KM_PAES_256 0x1c
45#define CPACF_KM_XTS_128 0x32 49#define CPACF_KM_XTS_128 0x32
46#define CPACF_KM_XTS_256 0x34 50#define CPACF_KM_XTS_256 0x34
51#define CPACF_KM_PXTS_128 0x3a
52#define CPACF_KM_PXTS_256 0x3c
47 53
48/* 54/*
49 * Function codes for the KMC (CIPHER MESSAGE WITH CHAINING) 55 * Function codes for the KMC (CIPHER MESSAGE WITH CHAINING)
@@ -56,6 +62,9 @@
56#define CPACF_KMC_AES_128 0x12 62#define CPACF_KMC_AES_128 0x12
57#define CPACF_KMC_AES_192 0x13 63#define CPACF_KMC_AES_192 0x13
58#define CPACF_KMC_AES_256 0x14 64#define CPACF_KMC_AES_256 0x14
65#define CPACF_KMC_PAES_128 0x1a
66#define CPACF_KMC_PAES_192 0x1b
67#define CPACF_KMC_PAES_256 0x1c
59#define CPACF_KMC_PRNG 0x43 68#define CPACF_KMC_PRNG 0x43
60 69
61/* 70/*
@@ -69,6 +78,9 @@
69#define CPACF_KMCTR_AES_128 0x12 78#define CPACF_KMCTR_AES_128 0x12
70#define CPACF_KMCTR_AES_192 0x13 79#define CPACF_KMCTR_AES_192 0x13
71#define CPACF_KMCTR_AES_256 0x14 80#define CPACF_KMCTR_AES_256 0x14
81#define CPACF_KMCTR_PAES_128 0x1a
82#define CPACF_KMCTR_PAES_192 0x1b
83#define CPACF_KMCTR_PAES_256 0x1c
72 84
73/* 85/*
74 * Function codes for the KIMD (COMPUTE INTERMEDIATE MESSAGE DIGEST) 86 * Function codes for the KIMD (COMPUTE INTERMEDIATE MESSAGE DIGEST)
diff --git a/drivers/crypto/Kconfig b/drivers/crypto/Kconfig
index 57c2d434ea4b..9355660cf243 100644
--- a/drivers/crypto/Kconfig
+++ b/drivers/crypto/Kconfig
@@ -137,6 +137,7 @@ config CRYPTO_AES_S390
137 depends on S390 137 depends on S390
138 select CRYPTO_ALGAPI 138 select CRYPTO_ALGAPI
139 select CRYPTO_BLKCIPHER 139 select CRYPTO_BLKCIPHER
140 select PKEY
140 help 141 help
141 This is the s390 hardware accelerated implementation of the 142 This is the s390 hardware accelerated implementation of the
142 AES cipher algorithms (FIPS-197). 143 AES cipher algorithms (FIPS-197).