author		Sebastian Siewior <sebastian@breakpoint.cc>	2007-11-10 06:29:33 -0500
committer	Herbert Xu <herbert@gondor.apana.org.au>	2008-01-10 16:16:11 -0500
commit		cd7c3bfe54270f41ac52be6b725a7194d99175b4
tree		03e9378557ffb7c8e38c452b7fd637587284b518 /drivers
parent		5157dea8139cf0edc4834d528531e642c0d27e37
[CRYPTO] geode: Add fallback for unsupported modes
The Geode AES crypto engine supports only 128-bit keys. This patch adds a
fallback for the other key sizes required by the AES standard.

Signed-off-by: Sebastian Siewior <sebastian@breakpoint.cc>
Acked-by: Jordan Crouse <jordan.crouse@amd.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
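The fallback works as a key-size dispatch: setkey() remembers the key length, keeps 128-bit keys on the hardware path, and hands 192- and 256-bit keys to a software transform that the crypto API allocates for CRYPTO_ALG_NEED_FALLBACK algorithms. A minimal userspace sketch of that dispatch, compilable on its own (aes_ctx, sw_fallback_setkey and the constants below are illustrative stand-ins, not the kernel's crypto API):

#include <stdio.h>
#include <string.h>
#include <errno.h>

#define AES_KEYSIZE_128 16
#define AES_KEYSIZE_192 24
#define AES_KEYSIZE_256 32

/* Stand-in context: the hardware can only hold a 128-bit key. */
struct aes_ctx {
        unsigned char hw_key[AES_KEYSIZE_128];
        unsigned int keylen;                   /* remembered for dispatch */
        unsigned char sw_key[AES_KEYSIZE_256]; /* fallback's copy */
};

/* Hypothetical software fallback setkey; the real driver instead calls
 * crypto_cipher_setkey()/crypto_blkcipher_setkey() on a fallback tfm
 * allocated at cra_init time. */
static int sw_fallback_setkey(struct aes_ctx *ctx, const unsigned char *key,
                              unsigned int len)
{
        memcpy(ctx->sw_key, key, len);
        return 0;
}

static int aes_setkey(struct aes_ctx *ctx, const unsigned char *key,
                      unsigned int len)
{
        ctx->keylen = len;

        if (len == AES_KEYSIZE_128) {          /* hardware path */
                memcpy(ctx->hw_key, key, len);
                return 0;
        }
        if (len != AES_KEYSIZE_192 && len != AES_KEYSIZE_256)
                return -EINVAL;                /* not a valid AES key at all */

        return sw_fallback_setkey(ctx, key, len); /* software path */
}

int main(void)
{
        struct aes_ctx ctx;
        unsigned char k[32] = { 0 };

        printf("128-bit: %d\n", aes_setkey(&ctx, k, 16)); /* 0, hardware */
        printf("256-bit: %d\n", aes_setkey(&ctx, k, 32)); /* 0, fallback */
        printf("bogus:   %d\n", aes_setkey(&ctx, k, 20)); /* -EINVAL */
        return 0;
}

In the driver itself the dispatch key is op->keylen, checked again with unlikely() in each encrypt/decrypt path below.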
Diffstat (limited to 'drivers')
-rw-r--r--	drivers/crypto/geode-aes.c | 243
-rw-r--r--	drivers/crypto/geode-aes.h |   6
2 files changed, 206 insertions, 43 deletions
diff --git a/drivers/crypto/geode-aes.c b/drivers/crypto/geode-aes.c
index 181d42c2deed..0ca92d414667 100644
--- a/drivers/crypto/geode-aes.c
+++ b/drivers/crypto/geode-aes.c
@@ -113,18 +113,103 @@ geode_aes_crypt(struct geode_aes_op *op)
 
 /* CRYPTO-API Functions */
 
-static int
-geode_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int len)
+static int geode_setkey_cip(struct crypto_tfm *tfm, const u8 *key,
+                unsigned int len)
 {
         struct geode_aes_op *op = crypto_tfm_ctx(tfm);
+        unsigned int ret;
+
+        op->keylen = len;
+
+        if (len == AES_KEYSIZE_128) {
+                memcpy(op->key, key, len);
+                return 0;
+        }
 
-        if (len != AES_KEY_LENGTH) {
+        if (len != AES_KEYSIZE_192 && len != AES_KEYSIZE_256) {
+                /* not supported at all */
                 tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                 return -EINVAL;
         }
 
-        memcpy(op->key, key, len);
-        return 0;
+        /*
+         * The requested key size is not supported by HW, do a fallback
+         */
+        op->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
+        op->fallback.blk->base.crt_flags |= (tfm->crt_flags & CRYPTO_TFM_REQ_MASK);
+
+        ret = crypto_cipher_setkey(op->fallback.cip, key, len);
+        if (ret) {
+                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
+                tfm->crt_flags |= (op->fallback.blk->base.crt_flags & CRYPTO_TFM_RES_MASK);
+        }
+        return ret;
+}
+
+static int geode_setkey_blk(struct crypto_tfm *tfm, const u8 *key,
+                unsigned int len)
+{
+        struct geode_aes_op *op = crypto_tfm_ctx(tfm);
+        unsigned int ret;
+
+        op->keylen = len;
+
+        if (len == AES_KEYSIZE_128) {
+                memcpy(op->key, key, len);
+                return 0;
+        }
+
+        if (len != AES_KEYSIZE_192 && len != AES_KEYSIZE_256) {
+                /* not supported at all */
+                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+                return -EINVAL;
+        }
+
+        /*
+         * The requested key size is not supported by HW, do a fallback
+         */
+        op->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
+        op->fallback.blk->base.crt_flags |= (tfm->crt_flags & CRYPTO_TFM_REQ_MASK);
+
+        ret = crypto_blkcipher_setkey(op->fallback.blk, key, len);
+        if (ret) {
+                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
+                tfm->crt_flags |= (op->fallback.blk->base.crt_flags & CRYPTO_TFM_RES_MASK);
+        }
+        return ret;
+}
+
+static int fallback_blk_dec(struct blkcipher_desc *desc,
+                struct scatterlist *dst, struct scatterlist *src,
+                unsigned int nbytes)
+{
+        unsigned int ret;
+        struct crypto_blkcipher *tfm;
+        struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
+
+        tfm = desc->tfm;
+        desc->tfm = op->fallback.blk;
+
+        ret = crypto_blkcipher_decrypt(desc, dst, src, nbytes);
+
+        desc->tfm = tfm;
+        return ret;
+}
+static int fallback_blk_enc(struct blkcipher_desc *desc,
+                struct scatterlist *dst, struct scatterlist *src,
+                unsigned int nbytes)
+{
+        unsigned int ret;
+        struct crypto_blkcipher *tfm;
+        struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
+
+        tfm = desc->tfm;
+        desc->tfm = op->fallback.blk;
+
+        ret = crypto_blkcipher_encrypt(desc, dst, src, nbytes);
+
+        desc->tfm = tfm;
+        return ret;
 }
 
 static void
@@ -132,8 +217,10 @@ geode_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
         struct geode_aes_op *op = crypto_tfm_ctx(tfm);
 
-        if ((out == NULL) || (in == NULL))
+        if (unlikely(op->keylen != AES_KEYSIZE_128)) {
+                crypto_cipher_encrypt_one(op->fallback.cip, out, in);
                 return;
+        }
 
         op->src = (void *) in;
         op->dst = (void *) out;
@@ -151,8 +238,10 @@ geode_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 {
         struct geode_aes_op *op = crypto_tfm_ctx(tfm);
 
-        if ((out == NULL) || (in == NULL))
+        if (unlikely(op->keylen != AES_KEYSIZE_128)) {
+                crypto_cipher_decrypt_one(op->fallback.cip, out, in);
                 return;
+        }
 
         op->src = (void *) in;
         op->dst = (void *) out;
@@ -164,24 +253,50 @@ geode_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
         geode_aes_crypt(op);
 }
 
+static int fallback_init_cip(struct crypto_tfm *tfm)
+{
+        const char *name = tfm->__crt_alg->cra_name;
+        struct geode_aes_op *op = crypto_tfm_ctx(tfm);
+
+        op->fallback.cip = crypto_alloc_cipher(name, 0,
+                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+
+        if (IS_ERR(op->fallback.cip)) {
+                printk(KERN_ERR "Error allocating fallback algo %s\n", name);
+                return PTR_ERR(op->fallback.blk);
+        }
+
+        return 0;
+}
+
+static void fallback_exit_cip(struct crypto_tfm *tfm)
+{
+        struct geode_aes_op *op = crypto_tfm_ctx(tfm);
+
+        crypto_free_cipher(op->fallback.cip);
+        op->fallback.cip = NULL;
+}
 
 static struct crypto_alg geode_alg = {
         .cra_name               =       "aes",
-        .cra_driver_name        =       "geode-aes-128",
+        .cra_driver_name        =       "geode-aes",
         .cra_priority           =       300,
         .cra_alignmask          =       15,
-        .cra_flags              =       CRYPTO_ALG_TYPE_CIPHER,
+        .cra_flags              =       CRYPTO_ALG_TYPE_CIPHER |
+                                                CRYPTO_ALG_NEED_FALLBACK,
+        .cra_init               =       fallback_init_cip,
+        .cra_exit               =       fallback_exit_cip,
         .cra_blocksize          =       AES_MIN_BLOCK_SIZE,
         .cra_ctxsize            =       sizeof(struct geode_aes_op),
         .cra_module             =       THIS_MODULE,
         .cra_list               =       LIST_HEAD_INIT(geode_alg.cra_list),
         .cra_u                  =       {
                 .cipher =       {
-                        .cia_min_keysize        =       AES_KEY_LENGTH,
-                        .cia_max_keysize        =       AES_KEY_LENGTH,
-                        .cia_setkey             =       geode_setkey,
+                        .cia_min_keysize        =       AES_MIN_KEY_SIZE,
+                        .cia_max_keysize        =       AES_MAX_KEY_SIZE,
+                        .cia_setkey             =       geode_setkey_cip,
                         .cia_encrypt            =       geode_encrypt,
                         .cia_decrypt            =       geode_decrypt
                 }
         }
 };
@@ -195,6 +310,9 @@ geode_cbc_decrypt(struct blkcipher_desc *desc,
         struct blkcipher_walk walk;
         int err, ret;
 
+        if (unlikely(op->keylen != AES_KEYSIZE_128))
+                return fallback_blk_dec(desc, dst, src, nbytes);
+
         blkcipher_walk_init(&walk, dst, src, nbytes);
         err = blkcipher_walk_virt(desc, &walk);
         memcpy(op->iv, walk.iv, AES_IV_LENGTH);
@@ -225,6 +343,9 @@ geode_cbc_encrypt(struct blkcipher_desc *desc,
         struct blkcipher_walk walk;
         int err, ret;
 
+        if (unlikely(op->keylen != AES_KEYSIZE_128))
+                return fallback_blk_enc(desc, dst, src, nbytes);
+
         blkcipher_walk_init(&walk, dst, src, nbytes);
         err = blkcipher_walk_virt(desc, &walk);
         memcpy(op->iv, walk.iv, AES_IV_LENGTH);
@@ -245,22 +366,49 @@ geode_cbc_encrypt(struct blkcipher_desc *desc,
         return err;
 }
 
+static int fallback_init_blk(struct crypto_tfm *tfm)
+{
+        const char *name = tfm->__crt_alg->cra_name;
+        struct geode_aes_op *op = crypto_tfm_ctx(tfm);
+
+        op->fallback.blk = crypto_alloc_blkcipher(name, 0,
+                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+
+        if (IS_ERR(op->fallback.blk)) {
+                printk(KERN_ERR "Error allocating fallback algo %s\n", name);
+                return PTR_ERR(op->fallback.blk);
+        }
+
+        return 0;
+}
+
+static void fallback_exit_blk(struct crypto_tfm *tfm)
+{
+        struct geode_aes_op *op = crypto_tfm_ctx(tfm);
+
+        crypto_free_blkcipher(op->fallback.blk);
+        op->fallback.blk = NULL;
+}
+
 static struct crypto_alg geode_cbc_alg = {
         .cra_name               =       "cbc(aes)",
-        .cra_driver_name        =       "cbc-aes-geode-128",
+        .cra_driver_name        =       "cbc-aes-geode",
         .cra_priority           =       400,
-        .cra_flags              =       CRYPTO_ALG_TYPE_BLKCIPHER,
+        .cra_flags              =       CRYPTO_ALG_TYPE_BLKCIPHER |
+                                                CRYPTO_ALG_NEED_FALLBACK,
+        .cra_init               =       fallback_init_blk,
+        .cra_exit               =       fallback_exit_blk,
         .cra_blocksize          =       AES_MIN_BLOCK_SIZE,
         .cra_ctxsize            =       sizeof(struct geode_aes_op),
         .cra_alignmask          =       15,
         .cra_type               =       &crypto_blkcipher_type,
         .cra_module             =       THIS_MODULE,
         .cra_list               =       LIST_HEAD_INIT(geode_cbc_alg.cra_list),
         .cra_u                  =       {
                 .blkcipher      =       {
-                        .min_keysize    =       AES_KEY_LENGTH,
-                        .max_keysize    =       AES_KEY_LENGTH,
-                        .setkey         =       geode_setkey,
+                        .min_keysize    =       AES_MIN_KEY_SIZE,
+                        .max_keysize    =       AES_MAX_KEY_SIZE,
+                        .setkey         =       geode_setkey_blk,
                         .encrypt        =       geode_cbc_encrypt,
                         .decrypt        =       geode_cbc_decrypt,
                         .ivsize         =       AES_IV_LENGTH,
@@ -277,6 +425,9 @@ geode_ecb_decrypt(struct blkcipher_desc *desc,
         struct blkcipher_walk walk;
         int err, ret;
 
+        if (unlikely(op->keylen != AES_KEYSIZE_128))
+                return fallback_blk_dec(desc, dst, src, nbytes);
+
         blkcipher_walk_init(&walk, dst, src, nbytes);
         err = blkcipher_walk_virt(desc, &walk);
 
@@ -304,6 +455,9 @@ geode_ecb_encrypt(struct blkcipher_desc *desc,
         struct blkcipher_walk walk;
         int err, ret;
 
+        if (unlikely(op->keylen != AES_KEYSIZE_128))
+                return fallback_blk_enc(desc, dst, src, nbytes);
+
         blkcipher_walk_init(&walk, dst, src, nbytes);
         err = blkcipher_walk_virt(desc, &walk);
 
@@ -323,21 +477,24 @@ geode_ecb_encrypt(struct blkcipher_desc *desc,
 }
 
 static struct crypto_alg geode_ecb_alg = {
         .cra_name               =       "ecb(aes)",
-        .cra_driver_name        =       "ecb-aes-geode-128",
+        .cra_driver_name        =       "ecb-aes-geode",
         .cra_priority           =       400,
-        .cra_flags              =       CRYPTO_ALG_TYPE_BLKCIPHER,
+        .cra_flags              =       CRYPTO_ALG_TYPE_BLKCIPHER |
+                                                CRYPTO_ALG_NEED_FALLBACK,
+        .cra_init               =       fallback_init_blk,
+        .cra_exit               =       fallback_exit_blk,
         .cra_blocksize          =       AES_MIN_BLOCK_SIZE,
         .cra_ctxsize            =       sizeof(struct geode_aes_op),
         .cra_alignmask          =       15,
         .cra_type               =       &crypto_blkcipher_type,
         .cra_module             =       THIS_MODULE,
         .cra_list               =       LIST_HEAD_INIT(geode_ecb_alg.cra_list),
         .cra_u                  =       {
                 .blkcipher      =       {
-                        .min_keysize    =       AES_KEY_LENGTH,
-                        .max_keysize    =       AES_KEY_LENGTH,
-                        .setkey         =       geode_setkey,
+                        .min_keysize    =       AES_MIN_KEY_SIZE,
+                        .max_keysize    =       AES_MAX_KEY_SIZE,
+                        .setkey         =       geode_setkey_blk,
                         .encrypt        =       geode_ecb_encrypt,
                         .decrypt        =       geode_ecb_decrypt,
                 }
@@ -367,7 +524,7 @@ geode_aes_probe(struct pci_dev *dev, const struct pci_device_id *id)
         if ((ret = pci_enable_device(dev)))
                 return ret;
 
-        if ((ret = pci_request_regions(dev, "geode-aes-128")))
+        if ((ret = pci_request_regions(dev, "geode-aes")))
                 goto eenable;
 
         _iobase = pci_iomap(dev, 0, 0);
diff --git a/drivers/crypto/geode-aes.h b/drivers/crypto/geode-aes.h
index 2f1d55982aac..14cc763da1e4 100644
--- a/drivers/crypto/geode-aes.h
+++ b/drivers/crypto/geode-aes.h
@@ -66,6 +66,12 @@ struct geode_aes_op {
 
         u8 key[AES_KEY_LENGTH];
         u8 iv[AES_IV_LENGTH];
+
+        union {
+                struct crypto_blkcipher *blk;
+                struct crypto_cipher *cip;
+        } fallback;
+        u32 keylen;
 };
 
 #endif
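A note on the fallback_blk_enc()/fallback_blk_dec() helpers above: they use a swap-and-restore trick, temporarily pointing desc->tfm at the software fallback so that the generic blkcipher entry point runs the software implementation, then restoring the hardware tfm before returning. A self-contained sketch of that pattern (struct tfm, struct desc, run_cipher and friends are simplified stand-ins for the kernel's blkcipher types, not the real API):

#include <stdio.h>

struct tfm { const char *name; };

struct desc { struct tfm *tfm; };

struct ctx { struct tfm *fallback; };

/* Stand-in for crypto_blkcipher_encrypt(): just reports which tfm ran. */
static int run_cipher(struct desc *desc)
{
        printf("running on %s\n", desc->tfm->name);
        return 0;
}

/* Swap-and-restore, mirroring fallback_blk_enc()/fallback_blk_dec():
 * point desc->tfm at the fallback so the generic entry point executes
 * the software implementation, then put the original tfm back. */
static int fallback_run(struct desc *desc, struct ctx *ctx)
{
        struct tfm *saved = desc->tfm;
        int ret;

        desc->tfm = ctx->fallback;
        ret = run_cipher(desc);
        desc->tfm = saved;
        return ret;
}

int main(void)
{
        struct tfm hw = { "geode-aes (hw)" };
        struct tfm sw = { "aes-generic (sw fallback)" };
        struct ctx ctx = { &sw };
        struct desc desc = { &hw };

        fallback_run(&desc, &ctx);            /* prints the fallback name */
        printf("restored: %s\n", desc.tfm->name);
        return 0;
}

The restore matters because desc is owned by the caller and may be reused for the next request, which should again take the hardware path.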