Diffstat (limited to 'arch/s390')
 -rw-r--r--  arch/s390/crypto/Makefile       |  4
 -rw-r--r--  arch/s390/crypto/sha.h          | 34
 -rw-r--r--  arch/s390/crypto/sha1_s390.c    | 91
 -rw-r--r--  arch/s390/crypto/sha256_s390.c  | 90
 -rw-r--r--  arch/s390/crypto/sha_common.c   | 90
 5 files changed, 138 insertions(+), 171 deletions(-)
diff --git a/arch/s390/crypto/Makefile b/arch/s390/crypto/Makefile
index 14e552c5cc43..5f1a5f89cc9e 100644
--- a/arch/s390/crypto/Makefile
+++ b/arch/s390/crypto/Makefile
@@ -2,8 +2,8 @@
 # Cryptographic API
 #
 
-obj-$(CONFIG_CRYPTO_SHA1_S390) += sha1_s390.o
-obj-$(CONFIG_CRYPTO_SHA256_S390) += sha256_s390.o
+obj-$(CONFIG_CRYPTO_SHA1_S390) += sha1_s390.o sha_common.o
+obj-$(CONFIG_CRYPTO_SHA256_S390) += sha256_s390.o sha_common.o
 obj-$(CONFIG_CRYPTO_DES_S390) += des_s390.o des_check_key.o
 obj-$(CONFIG_CRYPTO_AES_S390) += aes_s390.o
 obj-$(CONFIG_S390_PRNG) += prng.o
diff --git a/arch/s390/crypto/sha.h b/arch/s390/crypto/sha.h
new file mode 100644
index 000000000000..b7a52ab5db97
--- /dev/null
+++ b/arch/s390/crypto/sha.h
@@ -0,0 +1,34 @@
+/*
+ * Cryptographic API.
+ *
+ * s390 generic implementation of the SHA Secure Hash Algorithms.
+ *
+ * Copyright IBM Corp. 2007
+ * Author(s): Jan Glauber (jang@de.ibm.com)
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+#ifndef _CRYPTO_ARCH_S390_SHA_H
+#define _CRYPTO_ARCH_S390_SHA_H
+
+#include <linux/crypto.h>
+#include <crypto/sha.h>
+
+/* must be big enough for the largest SHA variant */
+#define SHA_MAX_BLOCK_SIZE	SHA256_BLOCK_SIZE
+
+struct s390_sha_ctx {
+	u64 count;		/* message length in bytes */
+	u32 state[8];
+	u8 buf[2 * SHA_MAX_BLOCK_SIZE];
+	int func;		/* KIMD function to use */
+};
+
+void s390_sha_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len);
+void s390_sha_final(struct crypto_tfm *tfm, u8 *out);
+
+#endif
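
Note: the context above is sized for the largest supported variant: state[8] holds the eight 32-bit words of a SHA-256 state (SHA-1 uses only the first five), and buf spans two blocks so the manual padding in s390_sha_final() always fits. A minimal sketch of how a variant driver fills it in, mirroring the sha1_init() conversion below (KIMD_SHA_1 comes from crypt_s390.h):

	static void sha1_init(struct crypto_tfm *tfm)
	{
		struct s390_sha_ctx *sctx = crypto_tfm_ctx(tfm);

		sctx->state[0] = SHA1_H0;	/* ... through state[4] = SHA1_H4 */
		sctx->count = 0;
		sctx->func = KIMD_SHA_1;	/* KIMD op the common code will invoke */
	}

The driver then points dia_update and dia_final at the shared s390_sha_update()/s390_sha_final() helpers.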
diff --git a/arch/s390/crypto/sha1_s390.c b/arch/s390/crypto/sha1_s390.c
index 9cf9eca22747..b3cb5a89b00d 100644
--- a/arch/s390/crypto/sha1_s390.c
+++ b/arch/s390/crypto/sha1_s390.c
@@ -29,16 +29,11 @@
 #include <crypto/sha.h>
 
 #include "crypt_s390.h"
-
-struct s390_sha1_ctx {
-	u64 count;		/* message length */
-	u32 state[5];
-	u8 buf[2 * SHA1_BLOCK_SIZE];
-};
+#include "sha.h"
 
 static void sha1_init(struct crypto_tfm *tfm)
 {
-	struct s390_sha1_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct s390_sha_ctx *sctx = crypto_tfm_ctx(tfm);
 
 	sctx->state[0] = SHA1_H0;
 	sctx->state[1] = SHA1_H1;
@@ -46,79 +41,7 @@ static void sha1_init(struct crypto_tfm *tfm)
 	sctx->state[3] = SHA1_H3;
 	sctx->state[4] = SHA1_H4;
 	sctx->count = 0;
-}
-
-static void sha1_update(struct crypto_tfm *tfm, const u8 *data,
-			unsigned int len)
-{
-	struct s390_sha1_ctx *sctx = crypto_tfm_ctx(tfm);
-	unsigned int index;
-	int ret;
-
-	/* how much is already in the buffer? */
-	index = sctx->count & 0x3f;
-
-	sctx->count += len;
-
-	if (index + len < SHA1_BLOCK_SIZE)
-		goto store;
-
-	/* process one stored block */
-	if (index) {
-		memcpy(sctx->buf + index, data, SHA1_BLOCK_SIZE - index);
-		ret = crypt_s390_kimd(KIMD_SHA_1, sctx->state, sctx->buf,
-				      SHA1_BLOCK_SIZE);
-		BUG_ON(ret != SHA1_BLOCK_SIZE);
-		data += SHA1_BLOCK_SIZE - index;
-		len -= SHA1_BLOCK_SIZE - index;
-	}
-
-	/* process as many blocks as possible */
-	if (len >= SHA1_BLOCK_SIZE) {
-		ret = crypt_s390_kimd(KIMD_SHA_1, sctx->state, data,
-				      len & ~(SHA1_BLOCK_SIZE - 1));
-		BUG_ON(ret != (len & ~(SHA1_BLOCK_SIZE - 1)));
-		data += ret;
-		len -= ret;
-	}
-
-store:
-	/* anything left? */
-	if (len)
-		memcpy(sctx->buf + index , data, len);
-}
-
-/* Add padding and return the message digest. */
-static void sha1_final(struct crypto_tfm *tfm, u8 *out)
-{
-	struct s390_sha1_ctx *sctx = crypto_tfm_ctx(tfm);
-	u64 bits;
-	unsigned int index, end;
-	int ret;
-
-	/* must perform manual padding */
-	index = sctx->count & 0x3f;
-	end = (index < 56) ? SHA1_BLOCK_SIZE : (2 * SHA1_BLOCK_SIZE);
-
-	/* start pad with 1 */
-	sctx->buf[index] = 0x80;
-
-	/* pad with zeros */
-	index++;
-	memset(sctx->buf + index, 0x00, end - index - 8);
-
-	/* append message length */
-	bits = sctx->count * 8;
-	memcpy(sctx->buf + end - 8, &bits, sizeof(bits));
-
-	ret = crypt_s390_kimd(KIMD_SHA_1, sctx->state, sctx->buf, end);
-	BUG_ON(ret != end);
-
-	/* copy digest to out */
-	memcpy(out, sctx->state, SHA1_DIGEST_SIZE);
-
-	/* wipe context */
-	memset(sctx, 0, sizeof *sctx);
-}
+	sctx->func = KIMD_SHA_1;
+}
 
 static struct crypto_alg alg = {
@@ -127,21 +50,20 @@ static struct crypto_alg alg = {
 	.cra_priority = CRYPT_S390_PRIORITY,
 	.cra_flags = CRYPTO_ALG_TYPE_DIGEST,
 	.cra_blocksize = SHA1_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct s390_sha1_ctx),
+	.cra_ctxsize = sizeof(struct s390_sha_ctx),
 	.cra_module = THIS_MODULE,
 	.cra_list = LIST_HEAD_INIT(alg.cra_list),
 	.cra_u = { .digest = {
 	.dia_digestsize = SHA1_DIGEST_SIZE,
 	.dia_init = sha1_init,
-	.dia_update = sha1_update,
-	.dia_final = sha1_final } }
+	.dia_update = s390_sha_update,
+	.dia_final = s390_sha_final } }
 };
 
 static int __init sha1_s390_init(void)
 {
 	if (!crypt_s390_func_available(KIMD_SHA_1))
 		return -EOPNOTSUPP;
-
 	return crypto_register_alg(&alg);
 }
 
@@ -154,6 +76,5 @@ module_init(sha1_s390_init);
 module_exit(sha1_s390_fini);
 
 MODULE_ALIAS("sha1");
-
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm");
diff --git a/arch/s390/crypto/sha256_s390.c b/arch/s390/crypto/sha256_s390.c
index 2a3d756b35d4..19c03fb6ba7e 100644
--- a/arch/s390/crypto/sha256_s390.c
+++ b/arch/s390/crypto/sha256_s390.c
@@ -22,16 +22,11 @@
 #include <crypto/sha.h>
 
 #include "crypt_s390.h"
-
-struct s390_sha256_ctx {
-	u64 count;		/* message length */
-	u32 state[8];
-	u8 buf[2 * SHA256_BLOCK_SIZE];
-};
+#include "sha.h"
 
 static void sha256_init(struct crypto_tfm *tfm)
 {
-	struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);
+	struct s390_sha_ctx *sctx = crypto_tfm_ctx(tfm);
 
 	sctx->state[0] = SHA256_H0;
 	sctx->state[1] = SHA256_H1;
@@ -42,79 +37,7 @@ static void sha256_init(struct crypto_tfm *tfm)
 	sctx->state[6] = SHA256_H6;
 	sctx->state[7] = SHA256_H7;
 	sctx->count = 0;
-}
-
-static void sha256_update(struct crypto_tfm *tfm, const u8 *data,
-			  unsigned int len)
-{
-	struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);
-	unsigned int index;
-	int ret;
-
-	/* how much is already in the buffer? */
-	index = sctx->count & 0x3f;
-
-	sctx->count += len;
-
-	if ((index + len) < SHA256_BLOCK_SIZE)
-		goto store;
-
-	/* process one stored block */
-	if (index) {
-		memcpy(sctx->buf + index, data, SHA256_BLOCK_SIZE - index);
-		ret = crypt_s390_kimd(KIMD_SHA_256, sctx->state, sctx->buf,
-				      SHA256_BLOCK_SIZE);
-		BUG_ON(ret != SHA256_BLOCK_SIZE);
-		data += SHA256_BLOCK_SIZE - index;
-		len -= SHA256_BLOCK_SIZE - index;
-	}
-
-	/* process as many blocks as possible */
-	if (len >= SHA256_BLOCK_SIZE) {
-		ret = crypt_s390_kimd(KIMD_SHA_256, sctx->state, data,
-				      len & ~(SHA256_BLOCK_SIZE - 1));
-		BUG_ON(ret != (len & ~(SHA256_BLOCK_SIZE - 1)));
-		data += ret;
-		len -= ret;
-	}
-
-store:
-	/* anything left? */
-	if (len)
-		memcpy(sctx->buf + index , data, len);
-}
-
-/* Add padding and return the message digest */
-static void sha256_final(struct crypto_tfm *tfm, u8 *out)
-{
-	struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);
-	u64 bits;
-	unsigned int index, end;
-	int ret;
-
-	/* must perform manual padding */
-	index = sctx->count & 0x3f;
-	end = (index < 56) ? SHA256_BLOCK_SIZE : (2 * SHA256_BLOCK_SIZE);
-
-	/* start pad with 1 */
-	sctx->buf[index] = 0x80;
-
-	/* pad with zeros */
-	index++;
-	memset(sctx->buf + index, 0x00, end - index - 8);
-
-	/* append message length */
-	bits = sctx->count * 8;
-	memcpy(sctx->buf + end - 8, &bits, sizeof(bits));
-
-	ret = crypt_s390_kimd(KIMD_SHA_256, sctx->state, sctx->buf, end);
-	BUG_ON(ret != end);
-
-	/* copy digest to out */
-	memcpy(out, sctx->state, SHA256_DIGEST_SIZE);
-
-	/* wipe context */
-	memset(sctx, 0, sizeof *sctx);
-}
+	sctx->func = KIMD_SHA_256;
+}
 
 static struct crypto_alg alg = {
@@ -123,14 +46,14 @@ static struct crypto_alg alg = {
 	.cra_priority = CRYPT_S390_PRIORITY,
 	.cra_flags = CRYPTO_ALG_TYPE_DIGEST,
 	.cra_blocksize = SHA256_BLOCK_SIZE,
-	.cra_ctxsize = sizeof(struct s390_sha256_ctx),
+	.cra_ctxsize = sizeof(struct s390_sha_ctx),
 	.cra_module = THIS_MODULE,
 	.cra_list = LIST_HEAD_INIT(alg.cra_list),
 	.cra_u = { .digest = {
 	.dia_digestsize = SHA256_DIGEST_SIZE,
 	.dia_init = sha256_init,
-	.dia_update = sha256_update,
-	.dia_final = sha256_final } }
+	.dia_update = s390_sha_update,
+	.dia_final = s390_sha_final } }
 };
 
 static int sha256_s390_init(void)
@@ -150,6 +73,5 @@ module_init(sha256_s390_init);
 module_exit(sha256_s390_fini);
 
 MODULE_ALIAS("sha256");
-
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm");
diff --git a/arch/s390/crypto/sha_common.c b/arch/s390/crypto/sha_common.c
new file mode 100644
index 000000000000..80b6f2ba005e
--- /dev/null
+++ b/arch/s390/crypto/sha_common.c
@@ -0,0 +1,90 @@
+/*
+ * Cryptographic API.
+ *
+ * s390 generic implementation of the SHA Secure Hash Algorithms.
+ *
+ * Copyright IBM Corp. 2007
+ * Author(s): Jan Glauber (jang@de.ibm.com)
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#include <linux/crypto.h>
+#include "sha.h"
+#include "crypt_s390.h"
+
+void s390_sha_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
+{
+	struct s390_sha_ctx *ctx = crypto_tfm_ctx(tfm);
+	unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
+	unsigned int index;
+	int ret;
+
+	/* how much is already in the buffer? */
+	index = ctx->count & (bsize - 1);
+	ctx->count += len;
+
+	if ((index + len) < bsize)
+		goto store;
+
+	/* process one stored block */
+	if (index) {
+		memcpy(ctx->buf + index, data, bsize - index);
+		ret = crypt_s390_kimd(ctx->func, ctx->state, ctx->buf, bsize);
+		BUG_ON(ret != bsize);
+		data += bsize - index;
+		len -= bsize - index;
+	}
+
+	/* process as many blocks as possible */
+	if (len >= bsize) {
+		ret = crypt_s390_kimd(ctx->func, ctx->state, data,
+				      len & ~(bsize - 1));
+		BUG_ON(ret != (len & ~(bsize - 1)));
+		data += ret;
+		len -= ret;
+	}
+store:
+	if (len)
+		memcpy(ctx->buf + index , data, len);
+}
+EXPORT_SYMBOL_GPL(s390_sha_update);
+
+void s390_sha_final(struct crypto_tfm *tfm, u8 *out)
+{
+	struct s390_sha_ctx *ctx = crypto_tfm_ctx(tfm);
+	unsigned int bsize = crypto_tfm_alg_blocksize(tfm);
+	u64 bits;
+	unsigned int index, end;
+	int ret;
+
+	/* must perform manual padding */
+	index = ctx->count & (bsize - 1);
+	end = (index < bsize - 8) ? bsize : (2 * bsize);
+
+	/* start pad with 1 */
+	ctx->buf[index] = 0x80;
+	index++;
+
+	/* pad with zeros */
+	memset(ctx->buf + index, 0x00, end - index - 8);
+
+	bits = ctx->count * 8;
+	memcpy(ctx->buf + end - 8, &bits, sizeof(bits));
+
+	ret = crypt_s390_kimd(ctx->func, ctx->state, ctx->buf, end);
+	BUG_ON(ret != end);
+
+	/* copy digest to out */
+	memcpy(out, ctx->state, crypto_hash_digestsize(crypto_hash_cast(tfm)));
+	/* wipe context */
+	memset(ctx, 0, sizeof *ctx);
+}
+EXPORT_SYMBOL_GPL(s390_sha_final);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("s390 SHA cipher common functions");
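
Note on the padding in s390_sha_final() above: end = (index < bsize - 8) ? bsize : (2 * bsize) picks one block when the buffered remainder, the 0x80 pad byte and the 64-bit bit count all fit, and two blocks otherwise. A worked example (illustrative numbers, not part of the commit): for SHA-1, bsize = 64; after 70 bytes of input, index = 70 & 63 = 6, so end = 64: buf[6] = 0x80, bytes 7..55 are zeroed, the bit count 560 is stored big-endian in buf[56..63], and the final crypt_s390_kimd() call processes that single 64-byte block.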