author		Jan Glauber <jan.glauber@de.ibm.com>	2007-04-27 10:01:54 -0400
committer	Martin Schwidefsky <schwidefsky@de.ibm.com>	2007-04-27 10:01:46 -0400
commit		131a395c18af43d824841642038e5cc0d48f0bd2 (patch)
tree		c3fec61634deee0589e3a0fa3aec0a63803f3815 /arch/s390/crypto
parent		6d4740c89c187ee8f5ac7355c4eeffda26493d1f (diff)
[S390] crypto: cleanup.
Cleanup code and remove obsolete documentation.

Signed-off-by: Jan Glauber <jan.glauber@de.ibm.com>
Signed-off-by: Heiko Carstens <heiko.carstens@de.ibm.com>
Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
Diffstat (limited to 'arch/s390/crypto')
-rw-r--r--	arch/s390/crypto/sha1_s390.c	129
-rw-r--r--	arch/s390/crypto/sha256_s390.c	38
2 files changed, 81 insertions(+), 86 deletions(-)
diff --git a/arch/s390/crypto/sha1_s390.c b/arch/s390/crypto/sha1_s390.c
index 969639f31977..af4460ec381f 100644
--- a/arch/s390/crypto/sha1_s390.c
+++ b/arch/s390/crypto/sha1_s390.c
@@ -25,99 +25,100 @@
  */
 #include <linux/init.h>
 #include <linux/module.h>
-#include <linux/mm.h>
 #include <linux/crypto.h>
-#include <asm/scatterlist.h>
-#include <asm/byteorder.h>
+
 #include "crypt_s390.h"
 
 #define SHA1_DIGEST_SIZE	20
 #define SHA1_BLOCK_SIZE		64
 
-struct crypt_s390_sha1_ctx {
-	u64 count;
+struct s390_sha1_ctx {
+	u64 count;		/* message length */
 	u32 state[5];
-	u32 buf_len;
-	u8 buffer[2 * SHA1_BLOCK_SIZE];
+	u8 buf[2 * SHA1_BLOCK_SIZE];
 };
 
 static void sha1_init(struct crypto_tfm *tfm)
 {
-	struct crypt_s390_sha1_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct s390_sha1_ctx *sctx = crypto_tfm_ctx(tfm);
 
-	ctx->state[0] = 0x67452301;
-	ctx->state[1] = 0xEFCDAB89;
-	ctx->state[2] = 0x98BADCFE;
-	ctx->state[3] = 0x10325476;
-	ctx->state[4] = 0xC3D2E1F0;
-
-	ctx->count = 0;
-	ctx->buf_len = 0;
+	sctx->state[0] = 0x67452301;
+	sctx->state[1] = 0xEFCDAB89;
+	sctx->state[2] = 0x98BADCFE;
+	sctx->state[3] = 0x10325476;
+	sctx->state[4] = 0xC3D2E1F0;
+	sctx->count = 0;
 }
 
 static void sha1_update(struct crypto_tfm *tfm, const u8 *data,
			unsigned int len)
 {
-	struct crypt_s390_sha1_ctx *sctx;
-	long imd_len;
-
-	sctx = crypto_tfm_ctx(tfm);
-	sctx->count += len * 8; /* message bit length */
-
-	/* anything in buffer yet? -> must be completed */
-	if (sctx->buf_len && (sctx->buf_len + len) >= SHA1_BLOCK_SIZE) {
-		/* complete full block and hash */
-		memcpy(sctx->buffer + sctx->buf_len, data,
-		       SHA1_BLOCK_SIZE - sctx->buf_len);
-		crypt_s390_kimd(KIMD_SHA_1, sctx->state, sctx->buffer,
-				SHA1_BLOCK_SIZE);
-		data += SHA1_BLOCK_SIZE - sctx->buf_len;
-		len -= SHA1_BLOCK_SIZE - sctx->buf_len;
-		sctx->buf_len = 0;
+	struct s390_sha1_ctx *sctx = crypto_tfm_ctx(tfm);
+	unsigned int index;
+	int ret;
+
+	/* how much is already in the buffer? */
+	index = sctx->count & 0x3f;
+
+	sctx->count += len;
+
+	if (index + len < SHA1_BLOCK_SIZE)
+		goto store;
+
+	/* process one stored block */
+	if (index) {
+		memcpy(sctx->buf + index, data, SHA1_BLOCK_SIZE - index);
+		ret = crypt_s390_kimd(KIMD_SHA_1, sctx->state, sctx->buf,
+				      SHA1_BLOCK_SIZE);
+		BUG_ON(ret != SHA1_BLOCK_SIZE);
+		data += SHA1_BLOCK_SIZE - index;
+		len -= SHA1_BLOCK_SIZE - index;
 	}
 
-	/* rest of data contains full blocks? */
-	imd_len = len & ~0x3ful;
-	if (imd_len) {
-		crypt_s390_kimd(KIMD_SHA_1, sctx->state, data, imd_len);
-		data += imd_len;
-		len -= imd_len;
+	/* process as many blocks as possible */
+	if (len >= SHA1_BLOCK_SIZE) {
+		ret = crypt_s390_kimd(KIMD_SHA_1, sctx->state, data,
+				      len & ~(SHA1_BLOCK_SIZE - 1));
+		BUG_ON(ret != (len & ~(SHA1_BLOCK_SIZE - 1)));
+		data += ret;
+		len -= ret;
 	}
-	/* anything left? store in buffer */
-	if (len) {
-		memcpy(sctx->buffer + sctx->buf_len , data, len);
-		sctx->buf_len += len;
-	}
-}
 
+store:
+	/* anything left? */
+	if (len)
+		memcpy(sctx->buf + index , data, len);
+}
 
-static void pad_message(struct crypt_s390_sha1_ctx* sctx)
+/* Add padding and return the message digest. */
+static void sha1_final(struct crypto_tfm *tfm, u8 *out)
 {
-	int index;
+	struct s390_sha1_ctx *sctx = crypto_tfm_ctx(tfm);
+	u64 bits;
+	unsigned int index, end;
+	int ret;
+
+	/* must perform manual padding */
+	index = sctx->count & 0x3f;
+	end = (index < 56) ? SHA1_BLOCK_SIZE : (2 * SHA1_BLOCK_SIZE);
 
-	index = sctx->buf_len;
-	sctx->buf_len = (sctx->buf_len < 56) ?
-			 SHA1_BLOCK_SIZE:2 * SHA1_BLOCK_SIZE;
 	/* start pad with 1 */
-	sctx->buffer[index] = 0x80;
+	sctx->buf[index] = 0x80;
+
 	/* pad with zeros */
 	index++;
-	memset(sctx->buffer + index, 0x00, sctx->buf_len - index);
-	/* append length */
-	memcpy(sctx->buffer + sctx->buf_len - 8, &sctx->count,
-	       sizeof sctx->count);
-}
+	memset(sctx->buf + index, 0x00, end - index - 8);
 
-/* Add padding and return the message digest. */
-static void sha1_final(struct crypto_tfm *tfm, u8 *out)
-{
-	struct crypt_s390_sha1_ctx *sctx = crypto_tfm_ctx(tfm);
+	/* append message length */
+	bits = sctx->count * 8;
+	memcpy(sctx->buf + end - 8, &bits, sizeof(bits));
+
+	ret = crypt_s390_kimd(KIMD_SHA_1, sctx->state, sctx->buf, end);
+	BUG_ON(ret != end);
 
-	/* must perform manual padding */
-	pad_message(sctx);
-	crypt_s390_kimd(KIMD_SHA_1, sctx->state, sctx->buffer, sctx->buf_len);
 	/* copy digest to out */
 	memcpy(out, sctx->state, SHA1_DIGEST_SIZE);
+
 	/* wipe context */
 	memset(sctx, 0, sizeof *sctx);
 }
@@ -128,7 +129,7 @@ static struct crypto_alg alg = {
 	.cra_priority	=	CRYPT_S390_PRIORITY,
 	.cra_flags	=	CRYPTO_ALG_TYPE_DIGEST,
 	.cra_blocksize	=	SHA1_BLOCK_SIZE,
-	.cra_ctxsize	=	sizeof(struct crypt_s390_sha1_ctx),
+	.cra_ctxsize	=	sizeof(struct s390_sha1_ctx),
 	.cra_module	=	THIS_MODULE,
 	.cra_list	=	LIST_HEAD_INIT(alg.cra_list),
 	.cra_u		=	{ .digest = {
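Note on the new bookkeeping above: sha1_update() now tracks sctx->count in bytes rather than bits, so `sctx->count & 0x3f` is exactly the number of bytes sitting in the partial-block buffer (0x3f == SHA1_BLOCK_SIZE - 1). The following is a minimal user-space sketch of that buffer-then-batch scheme; process_blocks() is a hypothetical stand-in for crypt_s390_kimd(), and the whole thing is illustrative only, not kernel code.

#include <string.h>

#define BLOCK_SIZE 64

struct sketch_ctx {
	unsigned long long count;	/* message length in bytes */
	unsigned char buf[BLOCK_SIZE];	/* partial-block buffer */
};

/* hypothetical stand-in for crypt_s390_kimd(); len is a block multiple */
static void process_blocks(const unsigned char *data, unsigned int len)
{
	(void)data; (void)len;	/* stub: a real version would update state */
}

static void sketch_update(struct sketch_ctx *ctx,
			  const unsigned char *data, unsigned int len)
{
	unsigned int index = ctx->count & (BLOCK_SIZE - 1);

	ctx->count += len;

	/* not enough for a full block yet: just buffer the bytes */
	if (index + len < BLOCK_SIZE) {
		memcpy(ctx->buf + index, data, len);
		return;
	}

	/* top up and flush the stored partial block first */
	if (index) {
		memcpy(ctx->buf + index, data, BLOCK_SIZE - index);
		process_blocks(ctx->buf, BLOCK_SIZE);
		data += BLOCK_SIZE - index;
		len -= BLOCK_SIZE - index;
	}

	/* hand over as many whole blocks as possible in one call */
	if (len >= BLOCK_SIZE) {
		unsigned int n = len & ~(BLOCK_SIZE - 1);

		process_blocks(data, n);
		data += n;
		len -= n;
	}

	/* keep the tail for the next call */
	if (len)
		memcpy(ctx->buf, data, len);
}

Batching whole blocks into one call is what makes the hardware KIMD instruction pay off: one invocation covers `len & ~(BLOCK_SIZE - 1)` bytes instead of looping per block.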
diff --git a/arch/s390/crypto/sha256_s390.c b/arch/s390/crypto/sha256_s390.c
index 78436c696d37..2ced3330bce0 100644
--- a/arch/s390/crypto/sha256_s390.c
+++ b/arch/s390/crypto/sha256_s390.c
@@ -26,7 +26,7 @@
 #define SHA256_BLOCK_SIZE	64
 
 struct s390_sha256_ctx {
-	u64 count;
+	u64 count;		/* message length */
 	u32 state[8];
 	u8 buf[2 * SHA256_BLOCK_SIZE];
 };
@@ -54,10 +54,9 @@ static void sha256_update(struct crypto_tfm *tfm, const u8 *data,
 	int ret;
 
 	/* how much is already in the buffer? */
-	index = sctx->count / 8 & 0x3f;
+	index = sctx->count & 0x3f;
 
-	/* update message bit length */
-	sctx->count += len * 8;
+	sctx->count += len;
 
 	if ((index + len) < SHA256_BLOCK_SIZE)
 		goto store;
@@ -87,12 +86,17 @@ store:
 	memcpy(sctx->buf + index , data, len);
 }
 
-static void pad_message(struct s390_sha256_ctx* sctx)
+/* Add padding and return the message digest */
+static void sha256_final(struct crypto_tfm *tfm, u8 *out)
 {
-	int index, end;
+	struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);
+	u64 bits;
+	unsigned int index, end;
+	int ret;
 
-	index = sctx->count / 8 & 0x3f;
-	end = index < 56 ? SHA256_BLOCK_SIZE : 2 * SHA256_BLOCK_SIZE;
+	/* must perform manual padding */
+	index = sctx->count & 0x3f;
+	end = (index < 56) ? SHA256_BLOCK_SIZE : (2 * SHA256_BLOCK_SIZE);
 
 	/* start pad with 1 */
 	sctx->buf[index] = 0x80;
@@ -102,21 +106,11 @@ static void pad_message(struct s390_sha256_ctx* sctx)
 	memset(sctx->buf + index, 0x00, end - index - 8);
 
 	/* append message length */
-	memcpy(sctx->buf + end - 8, &sctx->count, sizeof sctx->count);
-
-	sctx->count = end * 8;
-}
-
-/* Add padding and return the message digest */
-static void sha256_final(struct crypto_tfm *tfm, u8 *out)
-{
-	struct s390_sha256_ctx *sctx = crypto_tfm_ctx(tfm);
-
-	/* must perform manual padding */
-	pad_message(sctx);
+	bits = sctx->count * 8;
+	memcpy(sctx->buf + end - 8, &bits, sizeof(bits));
 
-	crypt_s390_kimd(KIMD_SHA_256, sctx->state, sctx->buf,
-			sctx->count / 8);
+	ret = crypt_s390_kimd(KIMD_SHA_256, sctx->state, sctx->buf, end);
+	BUG_ON(ret != end);
 
 	/* copy digest to out */
 	memcpy(out, sctx->state, SHA256_DIGEST_SIZE);
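The commit also folds pad_message() into the two final() routines. The padding itself is the standard FIPS 180 scheme: a 0x80 byte, zeros, then the message length in bits as a 64-bit value in the last 8 bytes of either one block (`end` == 64 when index < 56, so the length field still fits) or two. A small sketch of that computation follows; note that the bare memcpy() of the u64 length only yields the big-endian layout SHA-1/SHA-256 require because s390 is big-endian, an assumption this sketch inherits.

#include <string.h>

#define BLOCK_SIZE 64

/*
 * Pad the buffered tail of a message in buf (which must hold at least
 * 2 * BLOCK_SIZE bytes); index is the number of buffered tail bytes,
 * count the total message length in bytes. Returns the number of
 * padded bytes to hash. Big-endian hosts only, as noted above.
 */
static unsigned int pad_block(unsigned char *buf, unsigned int index,
			      unsigned long long count)
{
	/* one block suffices iff the 8-byte length fits after the 0x80 */
	unsigned int end = (index < 56) ? BLOCK_SIZE : 2 * BLOCK_SIZE;
	unsigned long long bits = count * 8;	/* spec wants the bit count */

	buf[index++] = 0x80;			/* start pad with a 1 bit */
	memset(buf + index, 0x00, end - index - 8);
	memcpy(buf + end - 8, &bits, sizeof(bits));
	return end;
}

Converting to bits only at this point, via the local `bits` variable, is what lets both update() paths keep `count` in plain bytes; the old code's `sctx->count = end * 8` trick to smuggle the padded size out of pad_message() disappears along with the helper.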