author     Jordan Crouse <jordan.crouse@amd.com>          2006-10-04 04:48:57 -0400
committer  David S. Miller <davem@sunset.davemloft.net>   2006-12-06 21:38:46 -0500
commit     9fe757b0cfcee0724027a675c533077287a21b96 (patch)
tree       a68942e39e350e78b0b9e0fcfa701b9ddbcf5a70 /drivers/crypto/geode-aes.c
parent     94b9bb5480e73cec4552b19fc3f809742b4ebf67 (diff)
[PATCH] crypto: Add support for the Geode LX AES hardware
Add a driver to support the AES hardware on the Geode LX processor.
Signed-off-by: Jordan Crouse <jordan.crouse@amd.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'drivers/crypto/geode-aes.c')
-rw-r--r--  drivers/crypto/geode-aes.c  474
1 file changed, 474 insertions, 0 deletions
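For context only (not part of this patch): the sketch below shows how a kernel-side consumer of the 2.6.19-era synchronous blkcipher interface could exercise the "cbc(aes)" transform this driver registers, assuming <linux/crypto.h>, <linux/scatterlist.h> and <linux/err.h> are included. The function name geode_cbc_demo, the all-zero key/IV and the 16-byte test buffers are illustrative assumptions, not material from the commit.

/* Illustrative sketch: encrypt one 16-byte block through "cbc(aes)". */
static int geode_cbc_demo(void)
{
        struct crypto_blkcipher *tfm;
        struct blkcipher_desc desc;
        struct scatterlist sg_in, sg_out;
        static u8 key[16], iv[16];              /* hypothetical all-zero key and IV */
        static u8 plain[16], cipher[16];
        int ret;

        /* Ask for a synchronous cbc(aes); the crypto core picks the
         * highest-priority provider, e.g. this driver on a Geode LX. */
        tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        ret = crypto_blkcipher_setkey(tfm, key, sizeof(key));
        if (ret)
                goto out;

        crypto_blkcipher_set_iv(tfm, iv, sizeof(iv));

        sg_init_one(&sg_in, plain, sizeof(plain));
        sg_init_one(&sg_out, cipher, sizeof(cipher));

        desc.tfm = tfm;
        desc.flags = 0;
        ret = crypto_blkcipher_encrypt(&desc, &sg_out, &sg_in, sizeof(plain));
out:
        crypto_free_blkcipher(tfm);
        return ret;
}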
diff --git a/drivers/crypto/geode-aes.c b/drivers/crypto/geode-aes.c
new file mode 100644
index 000000000000..da2d35db8b48
--- /dev/null
+++ b/drivers/crypto/geode-aes.c
@@ -0,0 +1,474 @@
/* Copyright (C) 2004-2006, Advanced Micro Devices, Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/sched.h>
#include <linux/pci.h>
#include <linux/pci_ids.h>
#include <linux/crypto.h>
#include <linux/spinlock.h>
#include <crypto/algapi.h>

#include <asm/io.h>
#include <asm/delay.h>

#include "geode-aes.h"

/* Register definitions */

#define AES_CTRLA_REG      0x0000

#define AES_CTRL_START     0x01
#define AES_CTRL_DECRYPT   0x00
#define AES_CTRL_ENCRYPT   0x02
#define AES_CTRL_WRKEY     0x04
#define AES_CTRL_DCA       0x08
#define AES_CTRL_SCA       0x10
#define AES_CTRL_CBC       0x20

#define AES_INTR_REG       0x0008

#define AES_INTRA_PENDING  (1 << 16)
#define AES_INTRB_PENDING  (1 << 17)

#define AES_INTR_PENDING   (AES_INTRA_PENDING | AES_INTRB_PENDING)
#define AES_INTR_MASK      0x07

#define AES_SOURCEA_REG    0x0010
#define AES_DSTA_REG       0x0014
#define AES_LENA_REG       0x0018
#define AES_WRITEKEY0_REG  0x0030
#define AES_WRITEIV0_REG   0x0040

/* A very large counter that is used to gracefully bail out of an
 * operation in case of trouble
 */

#define AES_OP_TIMEOUT     0x50000

/* Static structures */

static void __iomem *_iobase;
static spinlock_t lock;

/* Write a 128 bit field (either a writable key or IV) */
static inline void
_writefield(u32 offset, void *value)
{
        int i;
        for (i = 0; i < 4; i++)
                iowrite32(((u32 *) value)[i], _iobase + offset + (i * 4));
}

/* Read a 128 bit field (either a writable key or IV) */
static inline void
_readfield(u32 offset, void *value)
{
        int i;
        for (i = 0; i < 4; i++)
                ((u32 *) value)[i] = ioread32(_iobase + offset + (i * 4));
}

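/* Push one physically contiguous buffer through the engine: program the
 * source and destination physical addresses and the byte count, start the
 * operation, then poll (bounded by AES_OP_TIMEOUT) for the completion bit.
 * Returns 0 on completion and 1 on timeout.  Because the addresses are
 * taken with virt_to_phys(), callers must pass directly mapped buffers.
 */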
static int
do_crypt(void *src, void *dst, int len, u32 flags)
{
        u32 status;
        u32 counter = AES_OP_TIMEOUT;

        iowrite32(virt_to_phys(src), _iobase + AES_SOURCEA_REG);
        iowrite32(virt_to_phys(dst), _iobase + AES_DSTA_REG);
        iowrite32(len, _iobase + AES_LENA_REG);

        /* Start the operation */
        iowrite32(AES_CTRL_START | flags, _iobase + AES_CTRLA_REG);

        do
                status = ioread32(_iobase + AES_INTR_REG);
        while (!(status & AES_INTRA_PENDING) && --counter);

        /* Clear the event */
        iowrite32((status & 0xFF) | AES_INTRA_PENDING, _iobase + AES_INTR_REG);
        return counter ? 0 : 1;
}

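/* Run a single geode_aes_op through the hardware.  A global spinlock
 * serializes access to the one engine; for CBC the caller's IV is written
 * before the operation and read back afterwards so chaining can continue,
 * and a caller-supplied key is loaded when AES_FLAGS_USRKEY is set.
 * op->src and op->dst must be distinct buffers (in-place operation is
 * treated as a no-op).
 */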
unsigned int
geode_aes_crypt(struct geode_aes_op *op)
{
        u32 flags = 0;
        unsigned long iflags;

        if (op->len == 0 || op->src == op->dst)
                return 0;

        if (op->flags & AES_FLAGS_COHERENT)
                flags |= (AES_CTRL_DCA | AES_CTRL_SCA);

        if (op->dir == AES_DIR_ENCRYPT)
                flags |= AES_CTRL_ENCRYPT;

        /* Start the critical section */

        spin_lock_irqsave(&lock, iflags);

        if (op->mode == AES_MODE_CBC) {
                flags |= AES_CTRL_CBC;
                _writefield(AES_WRITEIV0_REG, op->iv);
        }

        if (op->flags & AES_FLAGS_USRKEY) {
                flags |= AES_CTRL_WRKEY;
                _writefield(AES_WRITEKEY0_REG, op->key);
        }

        do_crypt(op->src, op->dst, op->len, flags);

        if (op->mode == AES_MODE_CBC)
                _readfield(AES_WRITEIV0_REG, op->iv);

        spin_unlock_irqrestore(&lock, iflags);

        return op->len;
}

/* CRYPTO-API Functions */

static int
geode_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int len)
{
        struct geode_aes_op *op = crypto_tfm_ctx(tfm);

        if (len != AES_KEY_LENGTH) {
                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        memcpy(op->key, key, len);
        return 0;
}

static void
geode_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        struct geode_aes_op *op = crypto_tfm_ctx(tfm);

        if ((out == NULL) || (in == NULL))
                return;

        op->src = (void *) in;
        op->dst = (void *) out;
        op->mode = AES_MODE_ECB;
        op->flags = 0;
        op->len = AES_MIN_BLOCK_SIZE;
        op->dir = AES_DIR_ENCRYPT;

        geode_aes_crypt(op);
}

static void
geode_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        struct geode_aes_op *op = crypto_tfm_ctx(tfm);

        if ((out == NULL) || (in == NULL))
                return;

        op->src = (void *) in;
        op->dst = (void *) out;
        op->mode = AES_MODE_ECB;
        op->flags = 0;
        op->len = AES_MIN_BLOCK_SIZE;
        op->dir = AES_DIR_DECRYPT;

        geode_aes_crypt(op);
}

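/* Single-block "aes" cipher.  Only 128-bit keys are supported; the
 * priority of 300 lets this hardware implementation be preferred over
 * the generic software AES when both are available.
 */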
static struct crypto_alg geode_alg = {
        .cra_name           = "aes",
        .cra_driver_name    = "geode-aes-128",
        .cra_priority       = 300,
        .cra_alignmask      = 15,
        .cra_flags          = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize      = AES_MIN_BLOCK_SIZE,
        .cra_ctxsize        = sizeof(struct geode_aes_op),
        .cra_module         = THIS_MODULE,
        .cra_list           = LIST_HEAD_INIT(geode_alg.cra_list),
        .cra_u              = {
                .cipher = {
                        .cia_min_keysize = AES_KEY_LENGTH,
                        .cia_max_keysize = AES_KEY_LENGTH,
                        .cia_setkey      = geode_setkey,
                        .cia_encrypt     = geode_encrypt,
                        .cia_decrypt     = geode_decrypt
                }
        }
};

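/* blkcipher handlers: walk the source/destination scatterlists one
 * mapped chunk at a time, round each chunk down to a whole number of
 * 16-byte blocks, and hand any remainder back to blkcipher_walk_done()
 * for the next pass.
 */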
static int
geode_cbc_decrypt(struct blkcipher_desc *desc,
                  struct scatterlist *dst, struct scatterlist *src,
                  unsigned int nbytes)
{
        struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err, ret;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        while ((nbytes = walk.nbytes)) {
                op->src = walk.src.virt.addr;
                op->dst = walk.dst.virt.addr;
                op->mode = AES_MODE_CBC;
                op->len = nbytes - (nbytes % AES_MIN_BLOCK_SIZE);
                op->dir = AES_DIR_DECRYPT;

                memcpy(op->iv, walk.iv, AES_IV_LENGTH);

                ret = geode_aes_crypt(op);

                memcpy(walk.iv, op->iv, AES_IV_LENGTH);
                nbytes -= ret;

                err = blkcipher_walk_done(desc, &walk, nbytes);
        }

        return err;
}

static int
geode_cbc_encrypt(struct blkcipher_desc *desc,
                  struct scatterlist *dst, struct scatterlist *src,
                  unsigned int nbytes)
{
        struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err, ret;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        while ((nbytes = walk.nbytes)) {
                op->src = walk.src.virt.addr;
                op->dst = walk.dst.virt.addr;
                op->mode = AES_MODE_CBC;
                op->len = nbytes - (nbytes % AES_MIN_BLOCK_SIZE);
                op->dir = AES_DIR_ENCRYPT;

                memcpy(op->iv, walk.iv, AES_IV_LENGTH);

                ret = geode_aes_crypt(op);
                nbytes -= ret;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }

        return err;
}

static struct crypto_alg geode_cbc_alg = {
        .cra_name           = "cbc(aes)",
        .cra_driver_name    = "cbc-aes-geode-128",
        .cra_priority       = 400,
        .cra_flags          = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize      = AES_MIN_BLOCK_SIZE,
        .cra_ctxsize        = sizeof(struct geode_aes_op),
        .cra_alignmask      = 15,
        .cra_type           = &crypto_blkcipher_type,
        .cra_module         = THIS_MODULE,
        .cra_list           = LIST_HEAD_INIT(geode_cbc_alg.cra_list),
        .cra_u              = {
                .blkcipher = {
                        .min_keysize = AES_KEY_LENGTH,
                        .max_keysize = AES_KEY_LENGTH,
                        .setkey      = geode_setkey,
                        .encrypt     = geode_cbc_encrypt,
                        .decrypt     = geode_cbc_decrypt,
                }
        }
};

static int
geode_ecb_decrypt(struct blkcipher_desc *desc,
                  struct scatterlist *dst, struct scatterlist *src,
                  unsigned int nbytes)
{
        struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err, ret;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        while ((nbytes = walk.nbytes)) {
                op->src = walk.src.virt.addr;
                op->dst = walk.dst.virt.addr;
                op->mode = AES_MODE_ECB;
                op->len = nbytes - (nbytes % AES_MIN_BLOCK_SIZE);
                op->dir = AES_DIR_DECRYPT;

                ret = geode_aes_crypt(op);
                nbytes -= ret;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }

        return err;
}

static int
geode_ecb_encrypt(struct blkcipher_desc *desc,
                  struct scatterlist *dst, struct scatterlist *src,
                  unsigned int nbytes)
{
        struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err, ret;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        while ((nbytes = walk.nbytes)) {
                op->src = walk.src.virt.addr;
                op->dst = walk.dst.virt.addr;
                op->mode = AES_MODE_ECB;
                op->len = nbytes - (nbytes % AES_MIN_BLOCK_SIZE);
                op->dir = AES_DIR_ENCRYPT;

                ret = geode_aes_crypt(op);
                nbytes -= ret;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }

        return err;
}

static struct crypto_alg geode_ecb_alg = {
        .cra_name           = "ecb(aes)",
        .cra_driver_name    = "ecb-aes-geode-128",
        .cra_priority       = 400,
        .cra_flags          = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize      = AES_MIN_BLOCK_SIZE,
        .cra_ctxsize        = sizeof(struct geode_aes_op),
        .cra_alignmask      = 15,
        .cra_type           = &crypto_blkcipher_type,
        .cra_module         = THIS_MODULE,
        .cra_list           = LIST_HEAD_INIT(geode_ecb_alg.cra_list),
        .cra_u              = {
                .blkcipher = {
                        .min_keysize = AES_KEY_LENGTH,
                        .max_keysize = AES_KEY_LENGTH,
                        .setkey      = geode_setkey,
                        .encrypt     = geode_ecb_encrypt,
                        .decrypt     = geode_ecb_decrypt,
                }
        }
};

static void
geode_aes_remove(struct pci_dev *dev)
{
        crypto_unregister_alg(&geode_alg);
        crypto_unregister_alg(&geode_ecb_alg);
        crypto_unregister_alg(&geode_cbc_alg);

        pci_iounmap(dev, _iobase);
        _iobase = NULL;

        pci_release_regions(dev);
        pci_disable_device(dev);
}

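/* Probe: enable the PCI function, claim its regions, map BAR 0 for the
 * AES registers, clear any stale interrupt status, then register the
 * cipher and blkcipher algorithms, unwinding in reverse order on failure.
 */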
static int
geode_aes_probe(struct pci_dev *dev, const struct pci_device_id *id)
{
        int ret;

        if ((ret = pci_enable_device(dev)))
                return ret;

        if ((ret = pci_request_regions(dev, "geode-aes-128")))
                goto eenable;

        _iobase = pci_iomap(dev, 0, 0);

        if (_iobase == NULL) {
                ret = -ENOMEM;
                goto erequest;
        }

        spin_lock_init(&lock);

        /* Clear any pending activity */
        iowrite32(AES_INTR_PENDING | AES_INTR_MASK, _iobase + AES_INTR_REG);

        if ((ret = crypto_register_alg(&geode_alg)))
                goto eiomap;

        if ((ret = crypto_register_alg(&geode_ecb_alg)))
                goto ealg;

        if ((ret = crypto_register_alg(&geode_cbc_alg)))
                goto eecb;

        printk(KERN_NOTICE "geode-aes: GEODE AES engine enabled.\n");
        return 0;

 eecb:
        crypto_unregister_alg(&geode_ecb_alg);

 ealg:
        crypto_unregister_alg(&geode_alg);

 eiomap:
        pci_iounmap(dev, _iobase);

 erequest:
        pci_release_regions(dev);

 eenable:
        pci_disable_device(dev);

        printk(KERN_ERR "geode-aes: GEODE AES initialization failed.\n");
        return ret;
}

static struct pci_device_id geode_aes_tbl[] = {
        { PCI_VENDOR_ID_AMD, PCI_DEVICE_ID_AMD_LX_AES, PCI_ANY_ID, PCI_ANY_ID },
        { 0, }
};

MODULE_DEVICE_TABLE(pci, geode_aes_tbl);

static struct pci_driver geode_aes_driver = {
        .name = "Geode LX AES",
        .id_table = geode_aes_tbl,
        .probe = geode_aes_probe,
        .remove = __devexit_p(geode_aes_remove)
};

static int __init
geode_aes_init(void)
{
        return pci_module_init(&geode_aes_driver);
}

static void __exit
geode_aes_exit(void)
{
        pci_unregister_driver(&geode_aes_driver);
}

MODULE_AUTHOR("Advanced Micro Devices, Inc.");
MODULE_DESCRIPTION("Geode LX Hardware AES driver");
MODULE_LICENSE("GPL");

module_init(geode_aes_init);
module_exit(geode_aes_exit);