Diffstat (limited to 'crypto/shash.c')
-rw-r--r--	crypto/shash.c	239
1 file changed, 239 insertions, 0 deletions
diff --git a/crypto/shash.c b/crypto/shash.c
new file mode 100644
index 000000000000..82ec4bd8d2f5
--- /dev/null
+++ b/crypto/shash.c
@@ -0,0 +1,239 @@
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

static inline struct crypto_shash *__crypto_shash_cast(struct crypto_tfm *tfm)
{
	return container_of(tfm, struct crypto_shash, base);
}

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(CRYPTO_MINALIGN - 1));
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	memset(alignbuffer, 0, keylen);
	kfree(buffer);
	return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
}

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 buf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));

	memcpy(buf, data, unaligned_len);

	return shash->update(desc, buf, unaligned_len) ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 buf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	int err;

	err = shash->final(desc, buf);
	memcpy(out, buf, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask ||
	    !shash->finup)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask ||
	    !shash->digest)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

static int crypto_shash_init_tfm(struct crypto_tfm *tfm,
				 const struct crypto_type *frontend)
{
	if (frontend->type != CRYPTO_ALG_TYPE_SHASH)
		return -EINVAL;
	return 0;
}

static unsigned int crypto_shash_extsize(struct crypto_alg *alg,
					 const struct crypto_type *frontend)
{
	return alg->cra_ctxsize;
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type : shash\n");
	seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize : %u\n", salg->digestsize);
	seq_printf(m, "descsize : %u\n", salg->descsize);
}

static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_shash_extsize,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return __crypto_shash_cast(
		crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask));
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");
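For reference, a minimal caller-side sketch of how the interface introduced above is typically consumed: allocate a transform with crypto_alloc_shash(), size a shash_desc from crypto_shash_descsize(), then hash in one shot with crypto_shash_digest(). The example_sha256() helper, the "sha256" algorithm name, and the error handling below are illustrative assumptions for this sketch, not part of this commit; providers, for their part, fill in a struct shash_alg and register it with crypto_register_shash().

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/slab.h>

/*
 * Illustrative sketch only (not part of this commit): hash a buffer with
 * the shash API. Assumes a "sha256" shash implementation is registered.
 */
static int example_sha256(const u8 *data, unsigned int len, u8 *digest)
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	int err;

	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* The descriptor carries per-request state; its size is set by the driver. */
	desc = kzalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	desc->tfm = tfm;

	/* One-shot init + update + final via the digest entry point. */
	err = crypto_shash_digest(desc, data, len, digest);

	kfree(desc);
out_free_tfm:
	crypto_free_shash(tfm);
	return err;
}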