aboutsummaryrefslogtreecommitdiffstats
path: root/drivers/misc/tegra-cryptodev.c
diff options
context:
space:
mode:
authorJonathan Herman <hermanjl@cs.unc.edu>2013-01-22 10:38:37 -0500
committerJonathan Herman <hermanjl@cs.unc.edu>2013-01-22 10:38:37 -0500
commitfcc9d2e5a6c89d22b8b773a64fb4ad21ac318446 (patch)
treea57612d1888735a2ec7972891b68c1ac5ec8faea /drivers/misc/tegra-cryptodev.c
parent8dea78da5cee153b8af9c07a2745f6c55057fe12 (diff)
Added missing tegra files.HEADmaster
Diffstat (limited to 'drivers/misc/tegra-cryptodev.c')
-rw-r--r--drivers/misc/tegra-cryptodev.c349
1 file changed, 349 insertions, 0 deletions
diff --git a/drivers/misc/tegra-cryptodev.c b/drivers/misc/tegra-cryptodev.c
new file mode 100644
index 00000000000..d5ed6a22dda
--- /dev/null
+++ b/drivers/misc/tegra-cryptodev.c
@@ -0,0 +1,349 @@
1/*
2 * drivers/misc/tegra-cryptodev.c
3 *
4 * crypto dev node for NVIDIA tegra aes hardware
5 *
6 * Copyright (c) 2010, NVIDIA Corporation.
7 *
8 * This program is free software; you can redistribute it and/or modify
9 * it under the terms of the GNU General Public License as published by
10 * the Free Software Foundation; either version 2 of the License, or
11 * (at your option) any later version.
12 *
13 * This program is distributed in the hope that it will be useful, but WITHOUT
14 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
16 * more details.
17 *
18 * You should have received a copy of the GNU General Public License along
19 * with this program; if not, write to the Free Software Foundation, Inc.,
20 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
21 */
22
23#include <linux/module.h>
24#include <linux/init.h>
25#include <linux/errno.h>
26#include <linux/kernel.h>
27#include <linux/slab.h>
28#include <linux/fs.h>
29#include <linux/miscdevice.h>
30#include <linux/crypto.h>
31#include <linux/scatterlist.h>
32#include <linux/uaccess.h>
33#include <crypto/rng.h>
34
35#include "tegra-cryptodev.h"
36
37#define NBUFS 2
38
/*
 * Per-open-file crypto state.
 *
 * Allocated in tegra_crypto_dev_open(), stored in filp->private_data,
 * and freed in tegra_crypto_dev_release().
 */
struct tegra_crypto_ctx {
	struct crypto_ablkcipher *ecb_tfm;	/* "ecb-aes-tegra" transform */
	struct crypto_ablkcipher *cbc_tfm;	/* "cbc-aes-tegra" transform */
	struct crypto_rng *rng;			/* "rng-aes-tegra" transform */
	u8 seed[TEGRA_CRYPTO_RNG_SEED_SIZE];	/* copy of the last user-supplied RNG seed */
	int use_ssk;	/* nonzero: setkey is given a NULL key (presumably selects the
			 * hardware secure storage key, per the ioctl name — confirm) */
};
46
/* Completion tracking for one asynchronous ablkcipher request. */
struct tegra_crypto_completion {
	struct completion restart;	/* signalled by tegra_crypt_complete() */
	int req_err;			/* final request status (0 or negative errno) */
};
51
52static int alloc_bufs(unsigned long *buf[NBUFS])
53{
54 int i;
55
56 for (i = 0; i < NBUFS; i++) {
57 buf[i] = (void *)__get_free_page(GFP_KERNEL);
58 if (!buf[i])
59 goto err_free_buf;
60 }
61
62 return 0;
63
64err_free_buf:
65 while (i-- > 0)
66 free_page((unsigned long)buf[i]);
67
68 return -ENOMEM;
69}
70
71static void free_bufs(unsigned long *buf[NBUFS])
72{
73 int i;
74
75 for (i = 0; i < NBUFS; i++)
76 free_page((unsigned long)buf[i]);
77}
78
79static int tegra_crypto_dev_open(struct inode *inode, struct file *filp)
80{
81 struct tegra_crypto_ctx *ctx;
82 int ret = 0;
83
84 ctx = kzalloc(sizeof(struct tegra_crypto_ctx), GFP_KERNEL);
85 if (!ctx) {
86 pr_err("no memory for context\n");
87 return -ENOMEM;
88 }
89
90 ctx->ecb_tfm = crypto_alloc_ablkcipher("ecb-aes-tegra",
91 CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, 0);
92 if (IS_ERR(ctx->ecb_tfm)) {
93 pr_err("Failed to load transform for ecb-aes-tegra: %ld\n",
94 PTR_ERR(ctx->ecb_tfm));
95 ret = PTR_ERR(ctx->ecb_tfm);
96 goto fail_ecb;
97 }
98
99 ctx->cbc_tfm = crypto_alloc_ablkcipher("cbc-aes-tegra",
100 CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, 0);
101 if (IS_ERR(ctx->cbc_tfm)) {
102 pr_err("Failed to load transform for cbc-aes-tegra: %ld\n",
103 PTR_ERR(ctx->cbc_tfm));
104 ret = PTR_ERR(ctx->cbc_tfm);
105 goto fail_cbc;
106 }
107
108 ctx->rng = crypto_alloc_rng("rng-aes-tegra", CRYPTO_ALG_TYPE_RNG, 0);
109 if (IS_ERR(ctx->rng)) {
110 pr_err("Failed to load transform for tegra rng: %ld\n",
111 PTR_ERR(ctx->rng));
112 ret = PTR_ERR(ctx->rng);
113 goto fail_rng;
114 }
115
116 filp->private_data = ctx;
117 return ret;
118
119fail_rng:
120 crypto_free_ablkcipher(ctx->cbc_tfm);
121
122fail_cbc:
123 crypto_free_ablkcipher(ctx->ecb_tfm);
124
125fail_ecb:
126 kfree(ctx);
127 return ret;
128}
129
130static int tegra_crypto_dev_release(struct inode *inode, struct file *filp)
131{
132 struct tegra_crypto_ctx *ctx = filp->private_data;
133
134 crypto_free_ablkcipher(ctx->ecb_tfm);
135 crypto_free_ablkcipher(ctx->cbc_tfm);
136 crypto_free_rng(ctx->rng);
137 kfree(ctx);
138 filp->private_data = NULL;
139 return 0;
140}
141
142static void tegra_crypt_complete(struct crypto_async_request *req, int err)
143{
144 struct tegra_crypto_completion *done = req->data;
145
146 if (err != -EINPROGRESS) {
147 done->req_err = err;
148 complete(&done->restart);
149 }
150}
151
152static int process_crypt_req(struct tegra_crypto_ctx *ctx, struct tegra_crypt_req *crypt_req)
153{
154 struct crypto_ablkcipher *tfm;
155 struct ablkcipher_request *req = NULL;
156 struct scatterlist in_sg;
157 struct scatterlist out_sg;
158 unsigned long *xbuf[NBUFS];
159 int ret = 0, size = 0;
160 unsigned long total = 0;
161 struct tegra_crypto_completion tcrypt_complete;
162 const u8 *key = NULL;
163
164 if (crypt_req->op & TEGRA_CRYPTO_ECB) {
165 req = ablkcipher_request_alloc(ctx->ecb_tfm, GFP_KERNEL);
166 tfm = ctx->ecb_tfm;
167 } else {
168 req = ablkcipher_request_alloc(ctx->cbc_tfm, GFP_KERNEL);
169 tfm = ctx->cbc_tfm;
170 }
171 if (!req) {
172 pr_err("%s: Failed to allocate request\n", __func__);
173 return -ENOMEM;
174 }
175
176 if ((crypt_req->keylen < 0) || (crypt_req->keylen > AES_MAX_KEY_SIZE))
177 return -EINVAL;
178
179 crypto_ablkcipher_clear_flags(tfm, ~0);
180
181 if (!ctx->use_ssk)
182 key = crypt_req->key;
183
184 ret = crypto_ablkcipher_setkey(tfm, key, crypt_req->keylen);
185 if (ret < 0) {
186 pr_err("setkey failed");
187 goto process_req_out;
188 }
189
190 ret = alloc_bufs(xbuf);
191 if (ret < 0) {
192 pr_err("alloc_bufs failed");
193 goto process_req_out;
194 }
195
196 init_completion(&tcrypt_complete.restart);
197
198 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
199 tegra_crypt_complete, &tcrypt_complete);
200
201 total = crypt_req->plaintext_sz;
202 while (total > 0) {
203 size = min(total, PAGE_SIZE);
204 ret = copy_from_user((void *)xbuf[0],
205 (void __user *)crypt_req->plaintext, size);
206 if (ret < 0) {
207 pr_debug("%s: copy_from_user failed (%d)\n", __func__, ret);
208 goto process_req_buf_out;
209 }
210 sg_init_one(&in_sg, xbuf[0], size);
211 sg_init_one(&out_sg, xbuf[1], size);
212
213 ablkcipher_request_set_crypt(req, &in_sg,
214 &out_sg, size, crypt_req->iv);
215
216 INIT_COMPLETION(tcrypt_complete.restart);
217 tcrypt_complete.req_err = 0;
218 ret = crypt_req->encrypt ?
219 crypto_ablkcipher_encrypt(req) :
220 crypto_ablkcipher_decrypt(req);
221
222 if ((ret == -EINPROGRESS) || (ret == -EBUSY)) {
223 /* crypto driver is asynchronous */
224 ret = wait_for_completion_interruptible(&tcrypt_complete.restart);
225
226 if (ret < 0)
227 goto process_req_buf_out;
228
229 if (tcrypt_complete.req_err < 0) {
230 ret = tcrypt_complete.req_err;
231 goto process_req_buf_out;
232 }
233 } else if (ret < 0) {
234 pr_debug("%scrypt failed (%d)\n",
235 crypt_req->encrypt ? "en" : "de", ret);
236 goto process_req_buf_out;
237 }
238
239 ret = copy_to_user((void __user *)crypt_req->result,
240 (const void *)xbuf[1], size);
241 if (ret < 0)
242 goto process_req_buf_out;
243
244 total -= size;
245 crypt_req->result += size;
246 crypt_req->plaintext += size;
247 }
248
249process_req_buf_out:
250 free_bufs(xbuf);
251process_req_out:
252 ablkcipher_request_free(req);
253
254 return ret;
255}
256
257static long tegra_crypto_dev_ioctl(struct file *filp,
258 unsigned int ioctl_num, unsigned long arg)
259{
260 struct tegra_crypto_ctx *ctx = filp->private_data;
261 struct tegra_crypt_req crypt_req;
262 struct tegra_rng_req rng_req;
263 char *rng;
264 int ret = 0;
265
266 switch (ioctl_num) {
267 case TEGRA_CRYPTO_IOCTL_NEED_SSK:
268 ctx->use_ssk = (int)arg;
269 break;
270
271 case TEGRA_CRYPTO_IOCTL_PROCESS_REQ:
272 ret = copy_from_user(&crypt_req, (void __user *)arg, sizeof(crypt_req));
273 if (ret < 0) {
274 pr_err("%s: copy_from_user fail(%d)\n", __func__, ret);
275 break;
276 }
277
278 ret = process_crypt_req(ctx, &crypt_req);
279 break;
280
281 case TEGRA_CRYPTO_IOCTL_SET_SEED:
282 if (copy_from_user(&rng_req, (void __user *)arg, sizeof(rng_req)))
283 return -EFAULT;
284
285 memcpy(ctx->seed, rng_req.seed, TEGRA_CRYPTO_RNG_SEED_SIZE);
286
287 ret = crypto_rng_reset(ctx->rng, ctx->seed,
288 crypto_rng_seedsize(ctx->rng));
289 break;
290
291 case TEGRA_CRYPTO_IOCTL_GET_RANDOM:
292 if (copy_from_user(&rng_req, (void __user *)arg, sizeof(rng_req)))
293 return -EFAULT;
294
295 rng = kzalloc(rng_req.nbytes, GFP_KERNEL);
296 if (!rng) {
297 pr_err("mem alloc for rng fail");
298 ret = -ENODATA;
299 goto rng_out;
300 }
301
302 ret = crypto_rng_get_bytes(ctx->rng, rng,
303 rng_req.nbytes);
304
305 if (ret != rng_req.nbytes) {
306 pr_err("rng failed");
307 ret = -ENODATA;
308 goto rng_out;
309 }
310
311 ret = copy_to_user((void __user *)rng_req.rdata,
312 (const void *)rng, rng_req.nbytes);
313 ret = (ret < 0) ? -ENODATA : 0;
314rng_out:
315 if (rng)
316 kfree(rng);
317 break;
318
319 default:
320 pr_debug("invalid ioctl code(%d)", ioctl_num);
321 ret = -EINVAL;
322 }
323
324 return ret;
325}
326
/*
 * File operations for the "tegra-crypto" misc device.
 * NOTE(review): could likely be "static const" unless the header
 * exposes it to other translation units — verify before changing.
 */
struct file_operations tegra_crypto_fops = {
	.owner = THIS_MODULE,
	.open = tegra_crypto_dev_open,
	.release = tegra_crypto_dev_release,
	.unlocked_ioctl = tegra_crypto_dev_ioctl,
};
333
/* Misc device node: /dev/tegra-crypto with a dynamically assigned minor. */
struct miscdevice tegra_crypto_device = {
	.minor = MISC_DYNAMIC_MINOR,
	.name = "tegra-crypto",
	.fops = &tegra_crypto_fops,
};
339
/*
 * Register the misc device.  Runs at late_initcall time — presumably
 * so the tegra crypto algorithms are registered first; confirm
 * against the tegra-aes driver's init level.
 */
static int __init tegra_crypto_dev_init(void)
{
	return misc_register(&tegra_crypto_device);
}

late_initcall(tegra_crypto_dev_init);
346
347MODULE_DESCRIPTION("Tegra AES hw device node.");
348MODULE_AUTHOR("NVIDIA Corporation");
349MODULE_LICENSE("GPLv2");