Diffstat (limited to 'drivers/crypto')
 drivers/crypto/padlock-sha.c | 91 +++++++++++++++------------------------
 1 file changed, 35 insertions(+), 56 deletions(-)
diff --git a/drivers/crypto/padlock-sha.c b/drivers/crypto/padlock-sha.c
index b028db61c301..a781fd23b607 100644
--- a/drivers/crypto/padlock-sha.c
+++ b/drivers/crypto/padlock-sha.c
@@ -12,10 +12,11 @@
  *
  */
 
+#include <crypto/algapi.h>
+#include <linux/err.h>
 #include <linux/module.h>
 #include <linux/init.h>
 #include <linux/errno.h>
-#include <linux/crypto.h>
 #include <linux/cryptohash.h>
 #include <linux/interrupt.h>
 #include <linux/kernel.h>
@@ -30,28 +31,17 @@
 #define SHA256_DIGEST_SIZE	32
 #define SHA256_HMAC_BLOCK_SIZE	64
 
-static char *sha1_fallback = SHA1_DEFAULT_FALLBACK;
-static char *sha256_fallback = SHA256_DEFAULT_FALLBACK;
-
-module_param(sha1_fallback, charp, 0644);
-module_param(sha256_fallback, charp, 0644);
-
-MODULE_PARM_DESC(sha1_fallback, "Fallback driver for SHA1. Default is "
-		 SHA1_DEFAULT_FALLBACK);
-MODULE_PARM_DESC(sha256_fallback, "Fallback driver for SHA256. Default is "
-		 SHA256_DEFAULT_FALLBACK);
-
 struct padlock_sha_ctx {
 	char		*data;
 	size_t		used;
 	int		bypass;
 	void (*f_sha_padlock)(const char *in, char *out, int count);
-	struct crypto_tfm *fallback_tfm;
+	struct hash_desc fallback;
 };
 
 static inline struct padlock_sha_ctx *ctx(struct crypto_tfm *tfm)
 {
-	return (struct padlock_sha_ctx *)(crypto_tfm_ctx(tfm));
+	return crypto_tfm_ctx(tfm);
 }
 
 /* We'll need aligned address on the stack */
@@ -65,14 +55,12 @@ static void padlock_sha_bypass(struct crypto_tfm *tfm)
 	if (ctx(tfm)->bypass)
 		return;
 
-	BUG_ON(!ctx(tfm)->fallback_tfm);
-
-	crypto_digest_init(ctx(tfm)->fallback_tfm);
+	crypto_hash_init(&ctx(tfm)->fallback);
 	if (ctx(tfm)->data && ctx(tfm)->used) {
 		struct scatterlist sg;
 
 		sg_set_buf(&sg, ctx(tfm)->data, ctx(tfm)->used);
-		crypto_digest_update(ctx(tfm)->fallback_tfm, &sg, 1);
+		crypto_hash_update(&ctx(tfm)->fallback, &sg, sg.length);
 	}
 
 	ctx(tfm)->used = 0;
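
The bypass path above is the heart of the conversion: instead of driving the fallback through crypto_digest_init/update/final on a bare crypto_tfm, the driver now passes a struct hash_desc to the crypto_hash_* calls. A minimal sketch of a caller of that interface, assuming the same-era kernel crypto API (the helper name example_hash_buf is hypothetical):

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

/* Hypothetical one-shot caller of the crypto_hash interface that this
 * patch converts the fallback to. */
static int example_hash_buf(const char *alg, const u8 *buf,
			    unsigned int len, u8 *digest)
{
	struct hash_desc desc;
	struct scatterlist sg;
	int err;

	/* A zero type with CRYPTO_ALG_ASYNC in the mask requests a
	 * synchronous implementation. */
	desc.tfm = crypto_alloc_hash(alg, 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(desc.tfm))
		return PTR_ERR(desc.tfm);
	desc.flags = 0;

	sg_set_buf(&sg, buf, len);
	/* digest() performs init() + update() + final() in one call. */
	err = crypto_hash_digest(&desc, &sg, len, digest);

	crypto_free_hash(desc.tfm);
	return err;
}

The patch itself keeps separate init/update/final calls because the PadLock buffering may already be mid-stream when the bypass kicks in.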
@@ -95,9 +83,8 @@ static void padlock_sha_update(struct crypto_tfm *tfm,
 
 	if (unlikely(ctx(tfm)->bypass)) {
 		struct scatterlist sg;
-		BUG_ON(!ctx(tfm)->fallback_tfm);
 		sg_set_buf(&sg, (uint8_t *)data, length);
-		crypto_digest_update(ctx(tfm)->fallback_tfm, &sg, 1);
+		crypto_hash_update(&ctx(tfm)->fallback, &sg, length);
 		return;
 	}
 
@@ -160,8 +147,7 @@ static void padlock_do_sha256(const char *in, char *out, int count)
 static void padlock_sha_final(struct crypto_tfm *tfm, uint8_t *out)
 {
 	if (unlikely(ctx(tfm)->bypass)) {
-		BUG_ON(!ctx(tfm)->fallback_tfm);
-		crypto_digest_final(ctx(tfm)->fallback_tfm, out);
+		crypto_hash_final(&ctx(tfm)->fallback, out);
 		ctx(tfm)->bypass = 0;
 		return;
 	}
@@ -172,8 +158,11 @@ static void padlock_sha_final(struct crypto_tfm *tfm, uint8_t *out)
 	ctx(tfm)->used = 0;
 }
 
-static int padlock_cra_init(struct crypto_tfm *tfm, const char *fallback_driver_name)
+static int padlock_cra_init(struct crypto_tfm *tfm)
 {
+	const char *fallback_driver_name = tfm->__crt_alg->cra_name;
+	struct crypto_hash *fallback_tfm;
+
 	/* For now we'll allocate one page. This
 	 * could eventually be configurable one day. */
 	ctx(tfm)->data = (char *)__get_free_page(GFP_KERNEL);
@@ -181,14 +170,17 @@ static int padlock_cra_init(struct crypto_tfm *tfm, const char *fallback_driver_
 		return -ENOMEM;
 
 	/* Allocate a fallback and abort if it failed. */
-	ctx(tfm)->fallback_tfm = crypto_alloc_tfm(fallback_driver_name, 0);
-	if (!ctx(tfm)->fallback_tfm) {
+	fallback_tfm = crypto_alloc_hash(fallback_driver_name, 0,
+					 CRYPTO_ALG_ASYNC |
+					 CRYPTO_ALG_NEED_FALLBACK);
+	if (IS_ERR(fallback_tfm)) {
 		printk(KERN_WARNING PFX "Fallback driver '%s' could not be loaded!\n",
 		       fallback_driver_name);
 		free_page((unsigned long)(ctx(tfm)->data));
-		return -ENOENT;
+		return PTR_ERR(fallback_tfm);
 	}
 
+	ctx(tfm)->fallback.tfm = fallback_tfm;
 	return 0;
 }
 
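The allocation above passes the new flags as a mask, not a type. As an assumption about the core's lookup rule (sketched below), a candidate algorithm matches only when its cra_flags agree with the requested type on every bit in the mask; with type 0 and CRYPTO_ALG_NEED_FALLBACK in the mask, any algorithm that itself sets CRYPTO_ALG_NEED_FALLBACK, such as sha1-padlock and sha256-padlock below, is excluded, so the driver can never pick itself as its own fallback.

/* Illustrative sketch of the (type, mask) matching assumed above; the
 * real check lives in the crypto core's algorithm lookup. */
static int example_alg_matches(u32 cra_flags, u32 type, u32 mask)
{
	return ((cra_flags ^ type) & mask) == 0;
}

The same reasoning covers CRYPTO_ALG_ASYNC in the mask: the requested type has that bit clear, so only synchronous hashes qualify.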
@@ -196,14 +188,14 @@ static int padlock_sha1_cra_init(struct crypto_tfm *tfm)
 {
 	ctx(tfm)->f_sha_padlock = padlock_do_sha1;
 
-	return padlock_cra_init(tfm, sha1_fallback);
+	return padlock_cra_init(tfm);
 }
 
 static int padlock_sha256_cra_init(struct crypto_tfm *tfm)
 {
 	ctx(tfm)->f_sha_padlock = padlock_do_sha256;
 
-	return padlock_cra_init(tfm, sha256_fallback);
+	return padlock_cra_init(tfm);
 }
 
 static void padlock_cra_exit(struct crypto_tfm *tfm)
@@ -213,16 +205,16 @@ static void padlock_cra_exit(struct crypto_tfm *tfm)
 		ctx(tfm)->data = NULL;
 	}
 
-	BUG_ON(!ctx(tfm)->fallback_tfm);
-	crypto_free_tfm(ctx(tfm)->fallback_tfm);
-	ctx(tfm)->fallback_tfm = NULL;
+	crypto_free_hash(ctx(tfm)->fallback.tfm);
+	ctx(tfm)->fallback.tfm = NULL;
 }
 
 static struct crypto_alg sha1_alg = {
 	.cra_name		=	"sha1",
 	.cra_driver_name	=	"sha1-padlock",
 	.cra_priority		=	PADLOCK_CRA_PRIORITY,
-	.cra_flags		=	CRYPTO_ALG_TYPE_DIGEST,
+	.cra_flags		=	CRYPTO_ALG_TYPE_DIGEST |
+					CRYPTO_ALG_NEED_FALLBACK,
 	.cra_blocksize		=	SHA1_HMAC_BLOCK_SIZE,
 	.cra_ctxsize		=	sizeof(struct padlock_sha_ctx),
 	.cra_module		=	THIS_MODULE,
@@ -243,7 +235,8 @@ static struct crypto_alg sha256_alg = {
 	.cra_name		=	"sha256",
 	.cra_driver_name	=	"sha256-padlock",
 	.cra_priority		=	PADLOCK_CRA_PRIORITY,
-	.cra_flags		=	CRYPTO_ALG_TYPE_DIGEST,
+	.cra_flags		=	CRYPTO_ALG_TYPE_DIGEST |
+					CRYPTO_ALG_NEED_FALLBACK,
 	.cra_blocksize		=	SHA256_HMAC_BLOCK_SIZE,
 	.cra_ctxsize		=	sizeof(struct padlock_sha_ctx),
 	.cra_module		=	THIS_MODULE,
@@ -262,29 +255,15 @@ static struct crypto_alg sha256_alg = {
 
 static void __init padlock_sha_check_fallbacks(void)
 {
-	struct crypto_tfm *tfm;
-
-	/* We'll try to allocate one TFM for each fallback
-	 * to test that the modules are available. */
-	tfm = crypto_alloc_tfm(sha1_fallback, 0);
-	if (!tfm) {
-		printk(KERN_WARNING PFX "Couldn't load fallback module for '%s'. Tried '%s'.\n",
-		       sha1_alg.cra_name, sha1_fallback);
-	} else {
-		printk(KERN_NOTICE PFX "Fallback for '%s' is driver '%s' (prio=%d)\n", sha1_alg.cra_name,
-		       crypto_tfm_alg_driver_name(tfm), crypto_tfm_alg_priority(tfm));
-		crypto_free_tfm(tfm);
-	}
-
-	tfm = crypto_alloc_tfm(sha256_fallback, 0);
-	if (!tfm) {
-		printk(KERN_WARNING PFX "Couldn't load fallback module for '%s'. Tried '%s'.\n",
-		       sha256_alg.cra_name, sha256_fallback);
-	} else {
-		printk(KERN_NOTICE PFX "Fallback for '%s' is driver '%s' (prio=%d)\n", sha256_alg.cra_name,
-		       crypto_tfm_alg_driver_name(tfm), crypto_tfm_alg_priority(tfm));
-		crypto_free_tfm(tfm);
-	}
+	if (!crypto_has_hash("sha1", 0, CRYPTO_ALG_ASYNC |
+			     CRYPTO_ALG_NEED_FALLBACK))
+		printk(KERN_WARNING PFX
+		       "Couldn't load fallback module for sha1.\n");
+
+	if (!crypto_has_hash("sha256", 0, CRYPTO_ALG_ASYNC |
+			     CRYPTO_ALG_NEED_FALLBACK))
+		printk(KERN_WARNING PFX
+		       "Couldn't load fallback module for sha256.\n");
 }
 
 static int __init padlock_init(void)
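
crypto_has_hash() replaces the old allocate-and-free probe: it reports whether a matching implementation can be found (pulling in modules as needed) without constructing a transform, which is why the driver-name and priority notices disappear along with the module parameters. A hedged sketch of how the check slots into module init; the error handling here is illustrative, not taken from this file:

/* Illustrative init sequence: the fallback probe is advisory only, so
 * registration of the PadLock algorithms proceeds even when it warns. */
static int __init example_init(void)
{
	int rc;

	padlock_sha_check_fallbacks();

	rc = crypto_register_alg(&sha1_alg);
	if (rc)
		return rc;

	rc = crypto_register_alg(&sha256_alg);
	if (rc)
		crypto_unregister_alg(&sha1_alg);

	return rc;
}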