author		Herbert Xu <herbert@gondor.apana.org.au>	2008-08-31 01:47:27 -0400
committer	Herbert Xu <herbert@gondor.apana.org.au>	2008-12-24 19:01:26 -0500
commit		7b5a080b3c46f0cac71c0d0262634c6517d4ee4f (patch)
tree		41ba9e7051d1ecd47eb5cd371209229db3202ab6 /crypto
parent		7b0bac64cd5b74d6f1147524c26216de13a501fd (diff)
crypto: hash - Add shash interface
The shash interface replaces the current synchronous hash interface. It improves over hash in two ways. Firstly, shash is reentrant, meaning that the same tfm may be used by two threads simultaneously, as all hashing state is stored in a local descriptor. The other enhancement is that shash no longer takes scatter list entries; shash is specifically designed for synchronous algorithms, for which scatter lists are unnecessary.

All existing hash users will be converted to shash once the algorithms have been completely converted.

There is also a new finup function that combines update with final. This will be extended to ahash once the algorithm conversion is done.

This is also the first time that an algorithm type has its own registration function. Existing algorithm types will be converted to this scheme in due course.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
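For orientation, here is a minimal usage sketch of the new interface (illustrative only, not part of this patch). It assumes that crypto_shash_descsize() and crypto_free_shash() come from the accompanying <crypto/hash.h> header change, which this diff (limited to 'crypto') does not include, and that an shash implementation of "sha1" has been registered, which per the message above only happens once the algorithms are converted.

/* Illustrative sketch, not part of this patch. */
#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/slab.h>

static int example_shash_digest(const u8 *data, unsigned int len, u8 *out)
{
        struct crypto_shash *tfm;
        struct shash_desc *desc;
        int err;

        /* Assumes an shash "sha1" implementation has been registered. */
        tfm = crypto_alloc_shash("sha1", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        /*
         * All hashing state lives in this descriptor, not in the tfm,
         * which is what makes one tfm safe to share between threads.
         * crypto_shash_descsize() is assumed to be a header helper
         * returning the descsize declared by the algorithm.
         */
        desc = kzalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
        if (!desc) {
                err = -ENOMEM;
                goto out_put_tfm;
        }
        desc->tfm = tfm;

        /* digest = init + update + final in one call; finup would combine
         * just update and final on an already initialised descriptor. */
        err = crypto_shash_digest(desc, data, len, out);

        kfree(desc);
out_put_tfm:
        crypto_free_shash(tfm); /* assumed header counterpart to crypto_alloc_shash() */
        return err;
}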
Diffstat (limited to 'crypto')
-rw-r--r--	crypto/Makefile	  1
-rw-r--r--	crypto/shash.c	239
2 files changed, 240 insertions, 0 deletions
diff --git a/crypto/Makefile b/crypto/Makefile
index cd4a4ed078ff..46b08bf2035f 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -22,6 +22,7 @@ obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o
 
 crypto_hash-objs := hash.o
 crypto_hash-objs += ahash.o
+crypto_hash-objs += shash.o
 obj-$(CONFIG_CRYPTO_HASH2) += crypto_hash.o
 
 cryptomgr-objs := algboss.o testmgr.o
diff --git a/crypto/shash.c b/crypto/shash.c
new file mode 100644
index 000000000000..82ec4bd8d2f5
--- /dev/null
+++ b/crypto/shash.c
@@ -0,0 +1,239 @@
+/*
+ * Synchronous Cryptographic Hash operations.
+ *
+ * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License as published by the Free
+ * Software Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ */
+
+#include <crypto/internal/hash.h>
+#include <linux/err.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+#include <linux/slab.h>
+#include <linux/seq_file.h>
+
+static inline struct crypto_shash *__crypto_shash_cast(struct crypto_tfm *tfm)
+{
+        return container_of(tfm, struct crypto_shash, base);
+}
+
+static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
+                                  unsigned int keylen)
+{
+        struct shash_alg *shash = crypto_shash_alg(tfm);
+        unsigned long alignmask = crypto_shash_alignmask(tfm);
+        unsigned long absize;
+        u8 *buffer, *alignbuffer;
+        int err;
+
+        absize = keylen + (alignmask & ~(CRYPTO_MINALIGN - 1));
+        buffer = kmalloc(absize, GFP_KERNEL);
+        if (!buffer)
+                return -ENOMEM;
+
+        alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
+        memcpy(alignbuffer, key, keylen);
+        err = shash->setkey(tfm, alignbuffer, keylen);
+        memset(alignbuffer, 0, keylen);
+        kfree(buffer);
+        return err;
+}
+
+int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
+                        unsigned int keylen)
+{
+        struct shash_alg *shash = crypto_shash_alg(tfm);
+        unsigned long alignmask = crypto_shash_alignmask(tfm);
+
+        if ((unsigned long)key & alignmask)
+                return shash_setkey_unaligned(tfm, key, keylen);
+
+        return shash->setkey(tfm, key, keylen);
+}
+EXPORT_SYMBOL_GPL(crypto_shash_setkey);
+
+static inline unsigned int shash_align_buffer_size(unsigned len,
+                                                   unsigned long mask)
+{
+        return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
+}
+
+static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
+                                  unsigned int len)
+{
+        struct crypto_shash *tfm = desc->tfm;
+        struct shash_alg *shash = crypto_shash_alg(tfm);
+        unsigned long alignmask = crypto_shash_alignmask(tfm);
+        unsigned int unaligned_len = alignmask + 1 -
+                                     ((unsigned long)data & alignmask);
+        u8 buf[shash_align_buffer_size(unaligned_len, alignmask)]
+                __attribute__ ((aligned));
+
+        memcpy(buf, data, unaligned_len);
+
+        return shash->update(desc, buf, unaligned_len) ?:
+               shash->update(desc, data + unaligned_len, len - unaligned_len);
+}
+
+int crypto_shash_update(struct shash_desc *desc, const u8 *data,
+                        unsigned int len)
+{
+        struct crypto_shash *tfm = desc->tfm;
+        struct shash_alg *shash = crypto_shash_alg(tfm);
+        unsigned long alignmask = crypto_shash_alignmask(tfm);
+
+        if ((unsigned long)data & alignmask)
+                return shash_update_unaligned(desc, data, len);
+
+        return shash->update(desc, data, len);
+}
+EXPORT_SYMBOL_GPL(crypto_shash_update);
+
+static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
+{
+        struct crypto_shash *tfm = desc->tfm;
+        unsigned long alignmask = crypto_shash_alignmask(tfm);
+        struct shash_alg *shash = crypto_shash_alg(tfm);
+        unsigned int ds = crypto_shash_digestsize(tfm);
+        u8 buf[shash_align_buffer_size(ds, alignmask)]
+                __attribute__ ((aligned));
+        int err;
+
+        err = shash->final(desc, buf);
+        memcpy(out, buf, ds);
+        return err;
+}
+
+int crypto_shash_final(struct shash_desc *desc, u8 *out)
+{
+        struct crypto_shash *tfm = desc->tfm;
+        struct shash_alg *shash = crypto_shash_alg(tfm);
+        unsigned long alignmask = crypto_shash_alignmask(tfm);
+
+        if ((unsigned long)out & alignmask)
+                return shash_final_unaligned(desc, out);
+
+        return shash->final(desc, out);
+}
+EXPORT_SYMBOL_GPL(crypto_shash_final);
+
+static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
+                                 unsigned int len, u8 *out)
+{
+        return crypto_shash_update(desc, data, len) ?:
+               crypto_shash_final(desc, out);
+}
+
+int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
+                       unsigned int len, u8 *out)
+{
+        struct crypto_shash *tfm = desc->tfm;
+        struct shash_alg *shash = crypto_shash_alg(tfm);
+        unsigned long alignmask = crypto_shash_alignmask(tfm);
+
+        if (((unsigned long)data | (unsigned long)out) & alignmask ||
+            !shash->finup)
+                return shash_finup_unaligned(desc, data, len, out);
+
+        return shash->finup(desc, data, len, out);
+}
+EXPORT_SYMBOL_GPL(crypto_shash_finup);
+
+static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
+                                  unsigned int len, u8 *out)
+{
+        return crypto_shash_init(desc) ?:
+               crypto_shash_update(desc, data, len) ?:
+               crypto_shash_final(desc, out);
+}
+
+int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
+                        unsigned int len, u8 *out)
+{
+        struct crypto_shash *tfm = desc->tfm;
+        struct shash_alg *shash = crypto_shash_alg(tfm);
+        unsigned long alignmask = crypto_shash_alignmask(tfm);
+
+        if (((unsigned long)data | (unsigned long)out) & alignmask ||
+            !shash->digest)
+                return shash_digest_unaligned(desc, data, len, out);
+
+        return shash->digest(desc, data, len, out);
+}
+EXPORT_SYMBOL_GPL(crypto_shash_digest);
+
+static int crypto_shash_init_tfm(struct crypto_tfm *tfm,
+                                 const struct crypto_type *frontend)
+{
+        if (frontend->type != CRYPTO_ALG_TYPE_SHASH)
+                return -EINVAL;
+        return 0;
+}
+
+static unsigned int crypto_shash_extsize(struct crypto_alg *alg,
+                                         const struct crypto_type *frontend)
+{
+        return alg->cra_ctxsize;
+}
+
+static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
+        __attribute__ ((unused));
+static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
+{
+        struct shash_alg *salg = __crypto_shash_alg(alg);
+
+        seq_printf(m, "type         : shash\n");
+        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
+        seq_printf(m, "digestsize   : %u\n", salg->digestsize);
+        seq_printf(m, "descsize     : %u\n", salg->descsize);
+}
+
+static const struct crypto_type crypto_shash_type = {
+        .extsize = crypto_shash_extsize,
+        .init_tfm = crypto_shash_init_tfm,
+#ifdef CONFIG_PROC_FS
+        .show = crypto_shash_show,
+#endif
+        .maskclear = ~CRYPTO_ALG_TYPE_MASK,
+        .maskset = CRYPTO_ALG_TYPE_MASK,
+        .type = CRYPTO_ALG_TYPE_SHASH,
+        .tfmsize = offsetof(struct crypto_shash, base),
+};
+
+struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
+                                        u32 mask)
+{
+        return __crypto_shash_cast(
+                crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask));
+}
+EXPORT_SYMBOL_GPL(crypto_alloc_shash);
+
+int crypto_register_shash(struct shash_alg *alg)
+{
+        struct crypto_alg *base = &alg->base;
+
+        if (alg->digestsize > PAGE_SIZE / 8 ||
+            alg->descsize > PAGE_SIZE / 8)
+                return -EINVAL;
+
+        base->cra_type = &crypto_shash_type;
+        base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
+        base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;
+
+        return crypto_register_alg(base);
+}
+EXPORT_SYMBOL_GPL(crypto_register_shash);
+
+int crypto_unregister_shash(struct shash_alg *alg)
+{
+        return crypto_unregister_alg(&alg->base);
+}
+EXPORT_SYMBOL_GPL(crypto_unregister_shash);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("Synchronous cryptographic hash type");
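
The commit message notes that shash is the first algorithm type with its own registration function. As a rough, hypothetical sketch of how an algorithm might hook into crypto_register_shash() above: the "foohash" algorithm below is invented for illustration, and the .init callback plus the shash_desc_ctx() helper are assumed to come from the accompanying header change that is not part of this diff.

/* Hypothetical example algorithm; not part of this patch. */
#include <crypto/internal/hash.h>
#include <linux/module.h>
#include <linux/string.h>

#define FOOHASH_DIGEST_SIZE	16
#define FOOHASH_BLOCK_SIZE	64

/* Per-request state; lives in the caller's shash_desc, sized by .descsize. */
struct foohash_ctx {
        u8 digest[FOOHASH_DIGEST_SIZE];
};

static int foohash_init(struct shash_desc *desc)
{
        struct foohash_ctx *ctx = shash_desc_ctx(desc); /* assumed header helper */

        memset(ctx->digest, 0, sizeof(ctx->digest));
        return 0;
}

static int foohash_update(struct shash_desc *desc, const u8 *data,
                          unsigned int len)
{
        struct foohash_ctx *ctx = shash_desc_ctx(desc);
        unsigned int i;

        /* Toy mixing step standing in for a real compression function. */
        for (i = 0; i < len; i++)
                ctx->digest[i % FOOHASH_DIGEST_SIZE] ^= data[i];
        return 0;
}

static int foohash_final(struct shash_desc *desc, u8 *out)
{
        struct foohash_ctx *ctx = shash_desc_ctx(desc);

        memcpy(out, ctx->digest, FOOHASH_DIGEST_SIZE);
        return 0;
}

static struct shash_alg foohash_alg = {
        .digestsize	= FOOHASH_DIGEST_SIZE,
        .descsize	= sizeof(struct foohash_ctx),
        .init		= foohash_init,   /* .init assumed from the header's shash_alg */
        .update		= foohash_update,
        .final		= foohash_final,
        .base		= {
                .cra_name	= "foohash",
                .cra_blocksize	= FOOHASH_BLOCK_SIZE,
                .cra_module	= THIS_MODULE,
        }
};

static int __init foohash_mod_init(void)
{
        /* crypto_register_shash() sets cra_type and the SHASH type flag itself. */
        return crypto_register_shash(&foohash_alg);
}

static void __exit foohash_mod_exit(void)
{
        crypto_unregister_shash(&foohash_alg);
}

module_init(foohash_mod_init);
module_exit(foohash_mod_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Hypothetical shash registration example");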