2 * drivers/misc/tegra-cryptodev.c
4 * crypto dev node for NVIDIA tegra aes hardware
6 * Copyright (c) 2010, NVIDIA Corporation.
8 * This program is free software; you can redistribute it and/or modify
9 * it under the terms of the GNU General Public License as published by
10 * the Free Software Foundation; either version 2 of the License, or
11 * (at your option) any later version.
13 * This program is distributed in the hope that it will be useful, but WITHOUT
14 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
18 * You should have received a copy of the GNU General Public License along
19 * with this program; if not, write to the Free Software Foundation, Inc.,
20 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
23 #include <linux/module.h>
24 #include <linux/init.h>
25 #include <linux/errno.h>
26 #include <linux/kernel.h>
27 #include <linux/slab.h>
29 #include <linux/miscdevice.h>
30 #include <linux/crypto.h>
31 #include <linux/scatterlist.h>
32 #include <linux/uaccess.h>
33 #include <crypto/rng.h>
35 #include "tegra-cryptodev.h"
/*
 * Per-open-file crypto state: one async block-cipher transform each for
 * AES-ECB and AES-CBC, plus an AES-based RNG, all bound to the Tegra
 * hardware drivers in tegra_crypto_dev_open().
 * NOTE(review): tegra_crypto_dev_ioctl() also writes ctx->use_ssk, so
 * this struct has at least one more member not visible in this extract.
 */
39 struct tegra_crypto_ctx {
40 struct crypto_ablkcipher *ecb_tfm;
41 struct crypto_ablkcipher *cbc_tfm;
42 struct crypto_rng *rng;
/*
 * Wait context handed to the async cipher callback: the submitter
 * blocks on 'restart' until tegra_crypt_complete() signals it.
 * NOTE(review): process_crypt_req() reads tcrypt_complete.req_err,
 * so a req_err member exists beyond the lines visible here.
 */
46 struct tegra_crypto_completion {
47 struct completion restart;
/*
 * Allocate one zero-order page per scratch buffer (NBUFS total) for
 * staging user data through the cipher.  Per the callers, returns 0 on
 * success and a negative value on failure.
 */
51 static int alloc_bufs(unsigned long *buf[NBUFS])
55 for (i = 0; i < NBUFS; i++) {
56 buf[i] = (void *)__get_free_page(GFP_KERNEL);
/* failure unwind: release the pages obtained before the failure */
65 free_page((unsigned long)buf[i]);
/* Release all NBUFS pages previously obtained by alloc_bufs(). */
70 static void free_bufs(unsigned long *buf[NBUFS])
74 for (i = 0; i < NBUFS; i++)
75 free_page((unsigned long)buf[i])
/*
 * open() handler for the tegra-crypto misc device.
 *
 * Allocates a per-file context and acquires the three Tegra-specific
 * transforms (AES-ECB, AES-CBC, AES RNG) by their driver names.  On any
 * failure the previously acquired transforms are released in reverse
 * order via the error labels below.  Returns 0 on success or a negative
 * errno propagated from the crypto API.
 */
78 static int tegra_crypto_dev_open(struct inode *inode, struct file *filp)
80 struct tegra_crypto_ctx *ctx;
83 ctx = kzalloc(sizeof(struct tegra_crypto_ctx), GFP_KERNEL);
85 pr_err("no memory for context\n");
/* Asking for the hardware-specific implementation by driver name;
 * CRYPTO_ALG_ASYNC because the Tegra engine completes out of line. */
89 ctx->ecb_tfm = crypto_alloc_ablkcipher("ecb-aes-tegra",
90 CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, 0);
91 if (IS_ERR(ctx->ecb_tfm)) {
92 pr_err("Failed to load transform for ecb-aes-tegra: %ld\n",
93 PTR_ERR(ctx->ecb_tfm));
94 ret = PTR_ERR(ctx->ecb_tfm);
98 ctx->cbc_tfm = crypto_alloc_ablkcipher("cbc-aes-tegra",
99 CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, 0);
100 if (IS_ERR(ctx->cbc_tfm)) {
101 pr_err("Failed to load transform for cbc-aes-tegra: %ld\n",
102 PTR_ERR(ctx->cbc_tfm));
103 ret = PTR_ERR(ctx->cbc_tfm);
107 ctx->rng = crypto_alloc_rng("rng-aes-tegra", CRYPTO_ALG_TYPE_RNG, 0);
108 if (IS_ERR(ctx->rng)) {
109 pr_err("Failed to load transform for tegra rng: %ld\n",
111 ret = PTR_ERR(ctx->rng);
/* Success: stash the context for later ioctl()/release() calls. */
115 filp->private_data = ctx;
/* Error unwind labels: free in reverse order of acquisition.
 * NOTE(review): the kfree(ctx) / return statements fall in lines not
 * visible in this extract — confirm ctx is freed on these paths. */
119 crypto_free_ablkcipher(ctx->cbc_tfm);
122 crypto_free_ablkcipher(ctx->ecb_tfm);
/*
 * release() handler: drop all three transforms acquired at open time
 * and detach the context from the file.
 * NOTE(review): the kfree(ctx) call is not visible in this extract
 * (line dropped between 135 and 137) — confirm the context itself is
 * freed here, otherwise every open/close cycle leaks it.
 */
129 static int tegra_crypto_dev_release(struct inode *inode, struct file *filp)
131 struct tegra_crypto_ctx *ctx = filp->private_data;
133 crypto_free_ablkcipher(ctx->ecb_tfm);
134 crypto_free_ablkcipher(ctx->cbc_tfm);
135 crypto_free_rng(ctx->rng);
137 filp->private_data = NULL;
/*
 * Async completion callback for an ablkcipher request.  The crypto core
 * may invoke this with -EINPROGRESS as a progress notification; only a
 * final status (anything else) wakes the waiter in process_crypt_req().
 * NOTE(review): the line recording err (presumably done->req_err = err)
 * is not visible in this extract — confirm it precedes complete().
 */
141 static void tegra_crypt_complete(struct crypto_async_request *req, int err)
143 struct tegra_crypto_completion *done = req->data;
145 if (err != -EINPROGRESS) {
147 complete(&done->restart);
/*
 * Execute one user-space AES request (already copied into kernel memory
 * by the ioctl handler).
 *
 * Picks the ECB or CBC transform from ctx based on crypt_req->op, sets
 * the caller-supplied key, then streams crypt_req->plaintext through a
 * pair of scratch pages in chunks of at most PAGE_SIZE: copy chunk in
 * from user space, run the (possibly asynchronous) cipher, copy the
 * result back out to crypt_req->result.  Returns 0 on success or a
 * negative errno; all paths free the request and scratch pages below.
 */
151 static int process_crypt_req(struct tegra_crypto_ctx *ctx, struct tegra_crypt_req *crypt_req)
153 struct crypto_ablkcipher *tfm;
154 struct ablkcipher_request *req = NULL;
155 struct scatterlist in_sg;
156 struct scatterlist out_sg;
157 unsigned long *xbuf[NBUFS];
158 int ret = 0, size = 0;
159 unsigned long total = 0;
160 struct tegra_crypto_completion tcrypt_complete;
/* Select the transform matching the requested mode (ECB vs CBC). */
162 if (crypt_req->op & TEGRA_CRYPTO_ECB) {
163 req = ablkcipher_request_alloc(ctx->ecb_tfm, GFP_KERNEL);
166 req = ablkcipher_request_alloc(ctx->cbc_tfm, GFP_KERNEL);
170 pr_err("%s: Failed to allocate request\n", __func__);
/* Reject out-of-range key lengths before touching the transform.
 * NOTE(review): the (keylen < 0) half is only meaningful if keylen is
 * a signed type in struct tegra_crypt_req — confirm in the header. */
174 if ((crypt_req->keylen < 0) || (crypt_req->keylen > AES_MAX_KEY_SIZE))
177 crypto_ablkcipher_clear_flags(tfm, ~0);
180 ret = crypto_ablkcipher_setkey(tfm, crypt_req->key,
183 pr_err("setkey failed");
184 goto process_req_out;
/* Scratch pages: xbuf[0] stages input, xbuf[1] receives output. */
188 ret = alloc_bufs(xbuf);
190 pr_err("alloc_bufs failed");
191 goto process_req_out;
194 init_completion(&tcrypt_complete.restart);
196 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
197 tegra_crypt_complete, &tcrypt_complete);
/* Chunked loop over the full plaintext, one page at a time. */
199 total = crypt_req->plaintext_sz;
201 size = min(total, PAGE_SIZE);
202 ret = copy_from_user(xbuf[0],
203 (void __user *)crypt_req->plaintext, size);
205 pr_debug("%s: copy_from_user failed (%d)\n", __func__, ret);
206 goto process_req_buf_out;
208 sg_init_one(&in_sg, xbuf[0], size);
209 sg_init_one(&out_sg, xbuf[1], size);
211 ablkcipher_request_set_crypt(req, &in_sg,
212 &out_sg, size, crypt_req->iv);
/* Re-arm the completion for this chunk before submitting. */
214 INIT_COMPLETION(tcrypt_complete.restart);
215 tcrypt_complete.req_err = 0;
216 ret = crypt_req->encrypt ?
217 crypto_ablkcipher_encrypt(req) :
218 crypto_ablkcipher_decrypt(req);
220 if ((ret == -EINPROGRESS) || (ret == -EBUSY)) {
221 /* crypto driver is asynchronous */
/* NOTE(review): interruptible wait means a pending signal aborts the
 * request mid-stream with a partially written result buffer. */
222 ret = wait_for_completion_interruptible(&tcrypt_complete.restart);
225 goto process_req_buf_out;
/* Final status delivered through the callback, not the return value. */
227 if (tcrypt_complete.req_err < 0) {
228 ret = tcrypt_complete.req_err;
229 goto process_req_buf_out;
231 } else if (ret < 0) {
232 pr_debug("%scrypt failed (%d)\n",
233 crypt_req->encrypt ? "en" : "de", ret);
234 goto process_req_buf_out;
237 ret = copy_to_user((void __user *)crypt_req->result, xbuf[1],
240 goto process_req_buf_out;
/* Advance both user pointers past the chunk just processed. */
243 crypt_req->result += size;
244 crypt_req->plaintext += size;
/* Cleanup labels: scratch pages first, then the request itself. */
250 ablkcipher_request_free(req);
/*
 * unlocked_ioctl handler: dispatches the four tegra-crypto commands.
 *   NEED_SSK     — record whether to use the secure storage key.
 *   PROCESS_REQ  — copy in a tegra_crypt_req and run it.
 *   SET_SEED     — reseed the hardware RNG from user data.
 *   GET_RANDOM   — generate nbytes of RNG output into a temp buffer
 *                  and copy it back to user space.
 * Returns 0 or a negative errno.
 */
255 static long tegra_crypto_dev_ioctl(struct file *filp,
256 unsigned int ioctl_num, unsigned long arg)
258 struct tegra_crypto_ctx *ctx = filp->private_data;
259 struct tegra_crypt_req crypt_req;
260 struct tegra_rng_req rng_req;
265 case TEGRA_CRYPTO_IOCTL_NEED_SSK:
266 ctx->use_ssk = (int)arg;
268 case TEGRA_CRYPTO_IOCTL_PROCESS_REQ:
269 ret = copy_from_user(&crypt_req, (void __user *)arg, sizeof(crypt_req));
271 pr_debug("%s: copy_from_user fail(%d)\n", __func__, ret);
275 ret = process_crypt_req(ctx, &crypt_req);
278 case TEGRA_CRYPTO_IOCTL_SET_SEED:
279 if (copy_from_user(&rng_req, (void __user *)arg, sizeof(rng_req)))
282 ret = crypto_rng_reset(ctx->rng, rng_req.seed,
283 crypto_rng_seedsize(ctx->rng));
285 case TEGRA_CRYPTO_IOCTL_GET_RANDOM:
286 if (copy_from_user(&rng_req, (void __user *)arg, sizeof(rng_req)))
/* NOTE(review): rng_req.nbytes is user-controlled and unbounded here;
 * a huge value makes this kzalloc fail (or stress the allocator). */
289 rng = kzalloc(rng_req.nbytes, GFP_KERNEL);
291 pr_err("mem alloc for rng fail");
296 ret = crypto_rng_get_bytes(ctx->rng, rng, rng_req.nbytes);
298 if (ret != rng_req.nbytes) {
299 pr_debug("rng failed");
304 ret = copy_to_user((void __user *)rng_req.rdata,
305 rng, rng_req.nbytes);
/* BUG(review): copy_to_user() returns the number of bytes NOT copied,
 * which is never negative — so (ret < 0) can never be true and a
 * failed copy is silently reported as success.  Should be
 * ret = ret ? -EFAULT : 0; */
306 ret = (ret < 0) ? -ENODATA : 0;
315 pr_debug("invalid ioctl code(%d)", ioctl_num);
/*
 * File operations for the tegra-crypto misc device; only open, release
 * and unlocked_ioctl are provided (no read/write interface).
 * NOTE(review): this table is only referenced by tegra_crypto_device
 * below — it could be 'static const' unless another file externs it.
 */
321 struct file_operations tegra_crypto_fops = {
322 .owner = THIS_MODULE,
323 .open = tegra_crypto_dev_open,
324 .release = tegra_crypto_dev_release,
325 .unlocked_ioctl = tegra_crypto_dev_ioctl,
/*
 * Misc-device descriptor: registers as /dev/tegra-crypto with a
 * dynamically assigned minor number.
 */
328 struct miscdevice tegra_crypto_device = {
329 .minor = MISC_DYNAMIC_MINOR,
330 .name = "tegra-crypto",
331 .fops = &tegra_crypto_fops,
/*
 * Module init: register the misc device.  Uses late_initcall so the
 * Tegra crypto algorithm drivers this node depends on are registered
 * first.
 */
334 static int __init tegra_crypto_dev_init(void)
336 return misc_register(&tegra_crypto_device);
339 late_initcall(tegra_crypto_dev_init);
341 MODULE_DESCRIPTION("Tegra AES hw device node.");
342 MODULE_AUTHOR("NVIDIA Corporation");
343 MODULE_LICENSE("GPLv2");