aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorZain Wang <zain.wang@rock-chips.com>2015-11-25 00:43:32 -0500
committerHerbert Xu <herbert@gondor.apana.org.au>2015-11-27 08:19:32 -0500
commit433cd2c617bfbac27a02e40fbcce1713c84ce441 (patch)
tree85707d62dbcbf060eaa26d9034c65cef5bd25fa2
parente81c1b4646149c1e67610c83e8770a7217491a13 (diff)
crypto: rockchip - add crypto driver for rk3288
Crypto driver support: ecb(aes), cbc(aes), ecb(des), cbc(des), ecb(des3_ede), cbc(des3_ede). The tags above can be allocated as needed for your use case. Other algorithms and platforms will be added later. Signed-off-by: Zain Wang <zain.wang@rock-chips.com> Tested-by: Heiko Stuebner <heiko@sntech.de> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
-rw-r--r--drivers/crypto/Kconfig11
-rw-r--r--drivers/crypto/Makefile1
-rw-r--r--drivers/crypto/rockchip/Makefile3
-rw-r--r--drivers/crypto/rockchip/rk3288_crypto.c393
-rw-r--r--drivers/crypto/rockchip/rk3288_crypto.h216
-rw-r--r--drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c503
6 files changed, 1127 insertions, 0 deletions
diff --git a/drivers/crypto/Kconfig b/drivers/crypto/Kconfig
index 5357bc1f9e4b..95dccdea4dd1 100644
--- a/drivers/crypto/Kconfig
+++ b/drivers/crypto/Kconfig
@@ -497,4 +497,15 @@ config CRYPTO_DEV_SUN4I_SS
497 To compile this driver as a module, choose M here: the module 497 To compile this driver as a module, choose M here: the module
498 will be called sun4i-ss. 498 will be called sun4i-ss.
499 499
500config CRYPTO_DEV_ROCKCHIP
501 tristate "Rockchip's Cryptographic Engine driver"
502 depends on OF && ARCH_ROCKCHIP
503 select CRYPTO_AES
504 select CRYPTO_DES
505 select CRYPTO_BLKCIPHER
506
507 help
508 This driver interfaces with the hardware crypto accelerator.
509 Supporting cbc/ecb chainmode, and aes/des/des3_ede cipher mode.
510
500endif # CRYPTO_HW 511endif # CRYPTO_HW
diff --git a/drivers/crypto/Makefile b/drivers/crypto/Makefile
index c3ced6fbd1b8..713de9d11148 100644
--- a/drivers/crypto/Makefile
+++ b/drivers/crypto/Makefile
@@ -29,3 +29,4 @@ obj-$(CONFIG_CRYPTO_DEV_QAT) += qat/
29obj-$(CONFIG_CRYPTO_DEV_QCE) += qce/ 29obj-$(CONFIG_CRYPTO_DEV_QCE) += qce/
30obj-$(CONFIG_CRYPTO_DEV_VMX) += vmx/ 30obj-$(CONFIG_CRYPTO_DEV_VMX) += vmx/
31obj-$(CONFIG_CRYPTO_DEV_SUN4I_SS) += sunxi-ss/ 31obj-$(CONFIG_CRYPTO_DEV_SUN4I_SS) += sunxi-ss/
32obj-$(CONFIG_CRYPTO_DEV_ROCKCHIP) += rockchip/
diff --git a/drivers/crypto/rockchip/Makefile b/drivers/crypto/rockchip/Makefile
new file mode 100644
index 000000000000..7051c6c715f3
--- /dev/null
+++ b/drivers/crypto/rockchip/Makefile
@@ -0,0 +1,3 @@
1obj-$(CONFIG_CRYPTO_DEV_ROCKCHIP) += rk_crypto.o
2rk_crypto-objs := rk3288_crypto.o \
3 rk3288_crypto_ablkcipher.o \
diff --git a/drivers/crypto/rockchip/rk3288_crypto.c b/drivers/crypto/rockchip/rk3288_crypto.c
new file mode 100644
index 000000000000..6b72f8d2e643
--- /dev/null
+++ b/drivers/crypto/rockchip/rk3288_crypto.c
@@ -0,0 +1,393 @@
1/*
2 * Crypto acceleration support for Rockchip RK3288
3 *
4 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
5 *
6 * Author: Zain Wang <zain.wang@rock-chips.com>
7 *
8 * This program is free software; you can redistribute it and/or modify it
9 * under the terms and conditions of the GNU General Public License,
10 * version 2, as published by the Free Software Foundation.
11 *
12 * Some ideas are from marvell-cesa.c and s5p-sss.c driver.
13 */
14
15#include "rk3288_crypto.h"
16#include <linux/module.h>
17#include <linux/platform_device.h>
18#include <linux/of.h>
19#include <linux/clk.h>
20#include <linux/crypto.h>
21#include <linux/reset.h>
22
/*
 * Enable all four crypto clocks (sclk, aclk, hclk, dmaclk) in order.
 * On failure, every clock enabled so far is disabled again before
 * returning the error, so the function is all-or-nothing.
 *
 * Returns 0 on success or the clk_prepare_enable() error code.
 */
static int rk_crypto_enable_clk(struct rk_crypto_info *dev)
{
	int err;

	err = clk_prepare_enable(dev->sclk);
	if (err) {
		dev_err(dev->dev, "[%s:%d], Couldn't enable clock sclk\n",
			__func__, __LINE__);
		goto err_return;
	}
	err = clk_prepare_enable(dev->aclk);
	if (err) {
		dev_err(dev->dev, "[%s:%d], Couldn't enable clock aclk\n",
			__func__, __LINE__);
		goto err_aclk;
	}
	err = clk_prepare_enable(dev->hclk);
	if (err) {
		dev_err(dev->dev, "[%s:%d], Couldn't enable clock hclk\n",
			__func__, __LINE__);
		goto err_hclk;
	}
	err = clk_prepare_enable(dev->dmaclk);
	if (err) {
		dev_err(dev->dev, "[%s:%d], Couldn't enable clock dmaclk\n",
			__func__, __LINE__);
		goto err_dmaclk;
	}
	return err;
/* Unwind in reverse order of acquisition. */
err_dmaclk:
	clk_disable_unprepare(dev->hclk);
err_hclk:
	clk_disable_unprepare(dev->aclk);
err_aclk:
	clk_disable_unprepare(dev->sclk);
err_return:
	return err;
}
61
62static void rk_crypto_disable_clk(struct rk_crypto_info *dev)
63{
64 clk_disable_unprepare(dev->dmaclk);
65 clk_disable_unprepare(dev->hclk);
66 clk_disable_unprepare(dev->aclk);
67 clk_disable_unprepare(dev->sclk);
68}
69
70static int check_alignment(struct scatterlist *sg_src,
71 struct scatterlist *sg_dst,
72 int align_mask)
73{
74 int in, out, align;
75
76 in = IS_ALIGNED((uint32_t)sg_src->offset, 4) &&
77 IS_ALIGNED((uint32_t)sg_src->length, align_mask);
78 if (!sg_dst)
79 return in;
80 out = IS_ALIGNED((uint32_t)sg_dst->offset, 4) &&
81 IS_ALIGNED((uint32_t)sg_dst->length, align_mask);
82 align = in && out;
83
84 return (align && (sg_src->length == sg_dst->length));
85}
86
/*
 * Prepare the next chunk of the current request for DMA.
 *
 * Aligned path: map the given src (and optional dst) scatterlist
 * entries directly and consume sg_src->length bytes.
 *
 * Unaligned path: copy up to one page of input into the bounce buffer
 * dev->addr_vir via sg_pcopy_to_buffer(), then map that buffer.  Note
 * the same bounce buffer (dev->sg_tmp) is mapped for BOTH directions,
 * i.e. the engine operates in place on the bounce page.
 *
 * Updates dev->left_bytes, dev->count and dev->addr_in/addr_out.
 * Returns 0 on success or a negative errno.
 */
static int rk_load_data(struct rk_crypto_info *dev,
			struct scatterlist *sg_src,
			struct scatterlist *sg_dst)
{
	unsigned int count;

	/* Once a request goes unaligned it stays unaligned. */
	dev->aligned = dev->aligned ?
		check_alignment(sg_src, sg_dst, dev->align_size) :
		dev->aligned;
	if (dev->aligned) {
		count = min(dev->left_bytes, sg_src->length);
		dev->left_bytes -= count;

		if (!dma_map_sg(dev->dev, sg_src, 1, DMA_TO_DEVICE)) {
			dev_err(dev->dev, "[%s:%d] dma_map_sg(src) error\n",
				__func__, __LINE__);
			return -EINVAL;
		}
		dev->addr_in = sg_dma_address(sg_src);

		if (sg_dst) {
			if (!dma_map_sg(dev->dev, sg_dst, 1, DMA_FROM_DEVICE)) {
				dev_err(dev->dev,
					"[%s:%d] dma_map_sg(dst) error\n",
					__func__, __LINE__);
				/* Undo the src mapping on failure. */
				dma_unmap_sg(dev->dev, sg_src, 1,
					     DMA_TO_DEVICE);
				return -EINVAL;
			}
			dev->addr_out = sg_dma_address(sg_dst);
		}
	} else {
		/* Bounce at most one page per pass. */
		count = (dev->left_bytes > PAGE_SIZE) ?
			PAGE_SIZE : dev->left_bytes;

		/* Copy from the overall request offset, not this sg entry. */
		if (!sg_pcopy_to_buffer(dev->first, dev->nents,
					dev->addr_vir, count,
					dev->total - dev->left_bytes)) {
			dev_err(dev->dev, "[%s:%d] pcopy err\n",
				__func__, __LINE__);
			return -EINVAL;
		}
		dev->left_bytes -= count;
		sg_init_one(&dev->sg_tmp, dev->addr_vir, count);
		if (!dma_map_sg(dev->dev, &dev->sg_tmp, 1, DMA_TO_DEVICE)) {
			dev_err(dev->dev, "[%s:%d] dma_map_sg(sg_tmp) error\n",
				__func__, __LINE__);
			return -ENOMEM;
		}
		dev->addr_in = sg_dma_address(&dev->sg_tmp);

		if (sg_dst) {
			/* In-place on the bounce buffer: map it again as dst. */
			if (!dma_map_sg(dev->dev, &dev->sg_tmp, 1,
					DMA_FROM_DEVICE)) {
				dev_err(dev->dev,
					"[%s:%d] dma_map_sg(sg_tmp) error\n",
					__func__, __LINE__);
				dma_unmap_sg(dev->dev, &dev->sg_tmp, 1,
					     DMA_TO_DEVICE);
				return -ENOMEM;
			}
			dev->addr_out = sg_dma_address(&dev->sg_tmp);
		}
	}
	dev->count = count;
	return 0;
}
154
155static void rk_unload_data(struct rk_crypto_info *dev)
156{
157 struct scatterlist *sg_in, *sg_out;
158
159 sg_in = dev->aligned ? dev->sg_src : &dev->sg_tmp;
160 dma_unmap_sg(dev->dev, sg_in, 1, DMA_TO_DEVICE);
161
162 if (dev->sg_dst) {
163 sg_out = dev->aligned ? dev->sg_dst : &dev->sg_tmp;
164 dma_unmap_sg(dev->dev, sg_out, 1, DMA_FROM_DEVICE);
165 }
166}
167
168static irqreturn_t rk_crypto_irq_handle(int irq, void *dev_id)
169{
170 struct rk_crypto_info *dev = platform_get_drvdata(dev_id);
171 u32 interrupt_status;
172 int err = 0;
173
174 spin_lock(&dev->lock);
175 interrupt_status = CRYPTO_READ(dev, RK_CRYPTO_INTSTS);
176 CRYPTO_WRITE(dev, RK_CRYPTO_INTSTS, interrupt_status);
177 if (interrupt_status & 0x0a) {
178 dev_warn(dev->dev, "DMA Error\n");
179 err = -EFAULT;
180 } else if (interrupt_status & 0x05) {
181 err = dev->update(dev);
182 }
183 if (err)
184 dev->complete(dev, err);
185 spin_unlock(&dev->lock);
186 return IRQ_HANDLED;
187}
188
/*
 * Tasklet: dequeue the next request from the crypto queue and start it.
 * Backlogged requests are notified with -EINPROGRESS per the crypto
 * API convention.  Only ablkcipher requests are dispatched here; if
 * dev->start() fails the request is completed with the error.
 */
static void rk_crypto_tasklet_cb(unsigned long data)
{
	struct rk_crypto_info *dev = (struct rk_crypto_info *)data;
	struct crypto_async_request *async_req, *backlog;
	int err = 0;

	/* Queue manipulation is protected by the device lock. */
	spin_lock(&dev->lock);
	backlog   = crypto_get_backlog(&dev->queue);
	async_req = crypto_dequeue_request(&dev->queue);
	spin_unlock(&dev->lock);
	if (!async_req) {
		dev_err(dev->dev, "async_req is NULL !!\n");
		return;
	}
	if (backlog) {
		backlog->complete(backlog, -EINPROGRESS);
		backlog = NULL;
	}

	if (crypto_tfm_alg_type(async_req->tfm) == CRYPTO_ALG_TYPE_ABLKCIPHER)
		dev->ablk_req = ablkcipher_request_cast(async_req);
	err = dev->start(dev);
	if (err)
		dev->complete(dev, err);
}
214
/* All cipher algorithms this driver registers (see rk3288_crypto_ablkcipher.c). */
static struct rk_crypto_tmp *rk_cipher_algs[] = {
	&rk_ecb_aes_alg,
	&rk_cbc_aes_alg,
	&rk_ecb_des_alg,
	&rk_cbc_des_alg,
	&rk_ecb_des3_ede_alg,
	&rk_cbc_des3_ede_alg,
};
223
224static int rk_crypto_register(struct rk_crypto_info *crypto_info)
225{
226 unsigned int i, k;
227 int err = 0;
228
229 for (i = 0; i < ARRAY_SIZE(rk_cipher_algs); i++) {
230 rk_cipher_algs[i]->dev = crypto_info;
231 err = crypto_register_alg(&rk_cipher_algs[i]->alg);
232 if (err)
233 goto err_cipher_algs;
234 }
235 return 0;
236
237err_cipher_algs:
238 for (k = 0; k < i; k++)
239 crypto_unregister_alg(&rk_cipher_algs[k]->alg);
240 return err;
241}
242
243static void rk_crypto_unregister(void)
244{
245 unsigned int i;
246
247 for (i = 0; i < ARRAY_SIZE(rk_cipher_algs); i++)
248 crypto_unregister_alg(&rk_cipher_algs[i]->alg);
249}
250
/*
 * devm action: put the engine back into reset when the device is
 * unbound (registered via devm_add_action() in probe).
 */
static void rk_crypto_action(void *data)
{
	struct rk_crypto_info *crypto_info = data;

	reset_control_assert(crypto_info->rst);
}
257
/* Device-tree match table: binds to "rockchip,rk3288-crypto" nodes. */
static const struct of_device_id crypto_of_id_table[] = {
	{ .compatible = "rockchip,rk3288-crypto" },
	{}
};
MODULE_DEVICE_TABLE(of, crypto_of_id_table);
263
264static int rk_crypto_probe(struct platform_device *pdev)
265{
266 struct resource *res;
267 struct device *dev = &pdev->dev;
268 struct rk_crypto_info *crypto_info;
269 int err = 0;
270
271 crypto_info = devm_kzalloc(&pdev->dev,
272 sizeof(*crypto_info), GFP_KERNEL);
273 if (!crypto_info) {
274 err = -ENOMEM;
275 goto err_crypto;
276 }
277
278 crypto_info->rst = devm_reset_control_get(dev, "crypto-rst");
279 if (IS_ERR(crypto_info->rst)) {
280 err = PTR_ERR(crypto_info->rst);
281 goto err_crypto;
282 }
283
284 reset_control_assert(crypto_info->rst);
285 usleep_range(10, 20);
286 reset_control_deassert(crypto_info->rst);
287
288 err = devm_add_action(dev, rk_crypto_action, crypto_info);
289 if (err) {
290 reset_control_assert(crypto_info->rst);
291 goto err_crypto;
292 }
293
294 spin_lock_init(&crypto_info->lock);
295
296 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
297 crypto_info->reg = devm_ioremap_resource(&pdev->dev, res);
298 if (IS_ERR(crypto_info->reg)) {
299 err = PTR_ERR(crypto_info->reg);
300 goto err_crypto;
301 }
302
303 crypto_info->aclk = devm_clk_get(&pdev->dev, "aclk");
304 if (IS_ERR(crypto_info->aclk)) {
305 err = PTR_ERR(crypto_info->aclk);
306 goto err_crypto;
307 }
308
309 crypto_info->hclk = devm_clk_get(&pdev->dev, "hclk");
310 if (IS_ERR(crypto_info->hclk)) {
311 err = PTR_ERR(crypto_info->hclk);
312 goto err_crypto;
313 }
314
315 crypto_info->sclk = devm_clk_get(&pdev->dev, "sclk");
316 if (IS_ERR(crypto_info->sclk)) {
317 err = PTR_ERR(crypto_info->sclk);
318 goto err_crypto;
319 }
320
321 crypto_info->dmaclk = devm_clk_get(&pdev->dev, "apb_pclk");
322 if (IS_ERR(crypto_info->dmaclk)) {
323 err = PTR_ERR(crypto_info->dmaclk);
324 goto err_crypto;
325 }
326
327 crypto_info->irq = platform_get_irq(pdev, 0);
328 if (crypto_info->irq < 0) {
329 dev_warn(crypto_info->dev,
330 "control Interrupt is not available.\n");
331 err = crypto_info->irq;
332 goto err_crypto;
333 }
334
335 err = devm_request_irq(&pdev->dev, crypto_info->irq,
336 rk_crypto_irq_handle, IRQF_SHARED,
337 "rk-crypto", pdev);
338
339 if (err) {
340 dev_err(crypto_info->dev, "irq request failed.\n");
341 goto err_crypto;
342 }
343
344 crypto_info->dev = &pdev->dev;
345 platform_set_drvdata(pdev, crypto_info);
346
347 tasklet_init(&crypto_info->crypto_tasklet,
348 rk_crypto_tasklet_cb, (unsigned long)crypto_info);
349 crypto_init_queue(&crypto_info->queue, 50);
350
351 crypto_info->enable_clk = rk_crypto_enable_clk;
352 crypto_info->disable_clk = rk_crypto_disable_clk;
353 crypto_info->load_data = rk_load_data;
354 crypto_info->unload_data = rk_unload_data;
355
356 err = rk_crypto_register(crypto_info);
357 if (err) {
358 dev_err(dev, "err in register alg");
359 goto err_register_alg;
360 }
361
362 dev_info(dev, "Crypto Accelerator successfully registered\n");
363 return 0;
364
365err_register_alg:
366 tasklet_kill(&crypto_info->crypto_tasklet);
367err_crypto:
368 return err;
369}
370
371static int rk_crypto_remove(struct platform_device *pdev)
372{
373 struct rk_crypto_info *crypto_tmp = platform_get_drvdata(pdev);
374
375 rk_crypto_unregister();
376 tasklet_kill(&crypto_tmp->crypto_tasklet);
377 return 0;
378}
379
/* Platform driver glue; matched via crypto_of_id_table above. */
static struct platform_driver crypto_driver = {
	.probe		= rk_crypto_probe,
	.remove		= rk_crypto_remove,
	.driver		= {
		.name	= "rk3288-crypto",
		.of_match_table	= crypto_of_id_table,
	},
};

module_platform_driver(crypto_driver);

MODULE_AUTHOR("Zain Wang <zain.wang@rock-chips.com>");
MODULE_DESCRIPTION("Support for Rockchip's cryptographic engine");
MODULE_LICENSE("GPL");
diff --git a/drivers/crypto/rockchip/rk3288_crypto.h b/drivers/crypto/rockchip/rk3288_crypto.h
new file mode 100644
index 000000000000..e499c2c6c903
--- /dev/null
+++ b/drivers/crypto/rockchip/rk3288_crypto.h
@@ -0,0 +1,216 @@
1#ifndef __RK3288_CRYPTO_H__
2#define __RK3288_CRYPTO_H__
3
4#include <crypto/aes.h>
5#include <crypto/des.h>
6#include <crypto/algapi.h>
7#include <linux/interrupt.h>
8#include <linux/delay.h>
9
10#define _SBF(v, f) ((v) << (f))
11
12/* Crypto control registers*/
13#define RK_CRYPTO_INTSTS 0x0000
14#define RK_CRYPTO_PKA_DONE_INT BIT(5)
15#define RK_CRYPTO_HASH_DONE_INT BIT(4)
16#define RK_CRYPTO_HRDMA_ERR_INT BIT(3)
17#define RK_CRYPTO_HRDMA_DONE_INT BIT(2)
18#define RK_CRYPTO_BCDMA_ERR_INT BIT(1)
19#define RK_CRYPTO_BCDMA_DONE_INT BIT(0)
20
21#define RK_CRYPTO_INTENA 0x0004
22#define RK_CRYPTO_PKA_DONE_ENA BIT(5)
23#define RK_CRYPTO_HASH_DONE_ENA BIT(4)
24#define RK_CRYPTO_HRDMA_ERR_ENA BIT(3)
25#define RK_CRYPTO_HRDMA_DONE_ENA BIT(2)
26#define RK_CRYPTO_BCDMA_ERR_ENA BIT(1)
27#define RK_CRYPTO_BCDMA_DONE_ENA BIT(0)
28
29#define RK_CRYPTO_CTRL 0x0008
30#define RK_CRYPTO_WRITE_MASK _SBF(0xFFFF, 16)
31#define RK_CRYPTO_TRNG_FLUSH BIT(9)
32#define RK_CRYPTO_TRNG_START BIT(8)
33#define RK_CRYPTO_PKA_FLUSH BIT(7)
34#define RK_CRYPTO_HASH_FLUSH BIT(6)
35#define RK_CRYPTO_BLOCK_FLUSH BIT(5)
36#define RK_CRYPTO_PKA_START BIT(4)
37#define RK_CRYPTO_HASH_START BIT(3)
38#define RK_CRYPTO_BLOCK_START BIT(2)
39#define RK_CRYPTO_TDES_START BIT(1)
40#define RK_CRYPTO_AES_START BIT(0)
41
42#define RK_CRYPTO_CONF 0x000c
43/* HASH Receive DMA Address Mode: fix | increment */
44#define RK_CRYPTO_HR_ADDR_MODE BIT(8)
45/* Block Transmit DMA Address Mode: fix | increment */
46#define RK_CRYPTO_BT_ADDR_MODE BIT(7)
47/* Block Receive DMA Address Mode: fix | increment */
48#define RK_CRYPTO_BR_ADDR_MODE BIT(6)
49#define RK_CRYPTO_BYTESWAP_HRFIFO BIT(5)
50#define RK_CRYPTO_BYTESWAP_BTFIFO BIT(4)
51#define RK_CRYPTO_BYTESWAP_BRFIFO BIT(3)
52/* AES = 0 OR DES = 1 */
53#define RK_CRYPTO_DESSEL BIT(2)
54#define RK_CYYPTO_HASHINSEL_INDEPENDENT_SOURCE _SBF(0x00, 0)
55#define RK_CYYPTO_HASHINSEL_BLOCK_CIPHER_INPUT _SBF(0x01, 0)
56#define RK_CYYPTO_HASHINSEL_BLOCK_CIPHER_OUTPUT _SBF(0x02, 0)
57
58/* Block Receiving DMA Start Address Register */
59#define RK_CRYPTO_BRDMAS 0x0010
60/* Block Transmitting DMA Start Address Register */
61#define RK_CRYPTO_BTDMAS 0x0014
62/* Block Receiving DMA Length Register */
63#define RK_CRYPTO_BRDMAL 0x0018
64/* Hash Receiving DMA Start Address Register */
65#define RK_CRYPTO_HRDMAS 0x001c
66/* Hash Receiving DMA Length Register */
67#define RK_CRYPTO_HRDMAL 0x0020
68
69/* AES registers */
70#define RK_CRYPTO_AES_CTRL 0x0080
71#define RK_CRYPTO_AES_BYTESWAP_CNT BIT(11)
72#define RK_CRYPTO_AES_BYTESWAP_KEY BIT(10)
73#define RK_CRYPTO_AES_BYTESWAP_IV BIT(9)
74#define RK_CRYPTO_AES_BYTESWAP_DO BIT(8)
75#define RK_CRYPTO_AES_BYTESWAP_DI BIT(7)
76#define RK_CRYPTO_AES_KEY_CHANGE BIT(6)
77#define RK_CRYPTO_AES_ECB_MODE _SBF(0x00, 4)
78#define RK_CRYPTO_AES_CBC_MODE _SBF(0x01, 4)
79#define RK_CRYPTO_AES_CTR_MODE _SBF(0x02, 4)
80#define RK_CRYPTO_AES_128BIT_key _SBF(0x00, 2)
81#define RK_CRYPTO_AES_192BIT_key _SBF(0x01, 2)
82#define RK_CRYPTO_AES_256BIT_key _SBF(0x02, 2)
83/* Slave = 0 / fifo = 1 */
84#define RK_CRYPTO_AES_FIFO_MODE BIT(1)
85/* Encryption = 0 , Decryption = 1 */
86#define RK_CRYPTO_AES_DEC BIT(0)
87
88#define RK_CRYPTO_AES_STS 0x0084
89#define RK_CRYPTO_AES_DONE BIT(0)
90
91/* AES Input Data 0-3 Register */
92#define RK_CRYPTO_AES_DIN_0 0x0088
93#define RK_CRYPTO_AES_DIN_1 0x008c
94#define RK_CRYPTO_AES_DIN_2 0x0090
95#define RK_CRYPTO_AES_DIN_3 0x0094
96
97/* AES output Data 0-3 Register */
98#define RK_CRYPTO_AES_DOUT_0 0x0098
99#define RK_CRYPTO_AES_DOUT_1 0x009c
100#define RK_CRYPTO_AES_DOUT_2 0x00a0
101#define RK_CRYPTO_AES_DOUT_3 0x00a4
102
103/* AES IV Data 0-3 Register */
104#define RK_CRYPTO_AES_IV_0 0x00a8
105#define RK_CRYPTO_AES_IV_1 0x00ac
106#define RK_CRYPTO_AES_IV_2 0x00b0
107#define RK_CRYPTO_AES_IV_3 0x00b4
108
109/* AES Key Data 0-3 Register */
110#define RK_CRYPTO_AES_KEY_0 0x00b8
111#define RK_CRYPTO_AES_KEY_1 0x00bc
112#define RK_CRYPTO_AES_KEY_2 0x00c0
113#define RK_CRYPTO_AES_KEY_3 0x00c4
114#define RK_CRYPTO_AES_KEY_4 0x00c8
115#define RK_CRYPTO_AES_KEY_5 0x00cc
116#define RK_CRYPTO_AES_KEY_6 0x00d0
117#define RK_CRYPTO_AES_KEY_7 0x00d4
118
119/* des/tdes */
120#define RK_CRYPTO_TDES_CTRL 0x0100
121#define RK_CRYPTO_TDES_BYTESWAP_KEY BIT(8)
122#define RK_CRYPTO_TDES_BYTESWAP_IV BIT(7)
123#define RK_CRYPTO_TDES_BYTESWAP_DO BIT(6)
124#define RK_CRYPTO_TDES_BYTESWAP_DI BIT(5)
125/* 0: ECB, 1: CBC */
126#define RK_CRYPTO_TDES_CHAINMODE_CBC BIT(4)
127/* TDES Key Mode, 0 : EDE, 1 : EEE */
128#define RK_CRYPTO_TDES_EEE BIT(3)
129/* 0: DES, 1:TDES */
130#define RK_CRYPTO_TDES_SELECT BIT(2)
131/* 0: Slave, 1:Fifo */
132#define RK_CRYPTO_TDES_FIFO_MODE BIT(1)
133/* Encryption = 0 , Decryption = 1 */
134#define RK_CRYPTO_TDES_DEC BIT(0)
135
136#define RK_CRYPTO_TDES_STS 0x0104
137#define RK_CRYPTO_TDES_DONE BIT(0)
138
139#define RK_CRYPTO_TDES_DIN_0 0x0108
140#define RK_CRYPTO_TDES_DIN_1 0x010c
141#define RK_CRYPTO_TDES_DOUT_0 0x0110
142#define RK_CRYPTO_TDES_DOUT_1 0x0114
143#define RK_CRYPTO_TDES_IV_0 0x0118
144#define RK_CRYPTO_TDES_IV_1 0x011c
145#define RK_CRYPTO_TDES_KEY1_0 0x0120
146#define RK_CRYPTO_TDES_KEY1_1 0x0124
147#define RK_CRYPTO_TDES_KEY2_0 0x0128
148#define RK_CRYPTO_TDES_KEY2_1 0x012c
149#define RK_CRYPTO_TDES_KEY3_0 0x0130
150#define RK_CRYPTO_TDES_KEY3_1 0x0134
151
152#define CRYPTO_READ(dev, offset) \
153 readl_relaxed(((dev)->reg + (offset)))
154#define CRYPTO_WRITE(dev, offset, val) \
155 writel_relaxed((val), ((dev)->reg + (offset)))
156
/*
 * Per-device state shared between the core (rk3288_crypto.c) and the
 * algorithm implementations.  One request is processed at a time; the
 * "public" fields below describe the chunk currently in flight.
 */
struct rk_crypto_info {
	struct device			*dev;
	struct clk			*aclk;	/* presumably the AXI bus clock — TODO confirm vs TRM */
	struct clk			*hclk;	/* presumably the AHB bus clock — TODO confirm vs TRM */
	struct clk			*sclk;
	struct clk			*dmaclk;
	struct reset_control		*rst;
	void __iomem			*reg;	/* MMIO register base */
	int				irq;
	struct crypto_queue		queue;
	struct tasklet_struct		crypto_tasklet;
	struct ablkcipher_request	*ablk_req;	/* request currently being processed */
	/* device lock */
	spinlock_t			lock;

	/* the public variable */
	struct scatterlist		*sg_src;	/* current source sg entry */
	struct scatterlist		*sg_dst;	/* current destination sg entry */
	struct scatterlist		sg_tmp;		/* bounce-buffer sg for unaligned data */
	struct scatterlist		*first;		/* head of the request's src list */
	unsigned int			left_bytes;	/* bytes still to be processed */
	void				*addr_vir;	/* one-page bounce buffer */
	int				aligned;	/* non-zero while direct DMA is possible */
	int				align_size;
	size_t				nents;		/* sg_nents() of the src list */
	unsigned int			total;		/* total bytes in the request */
	unsigned int			count;		/* bytes in the chunk in flight */
	u32				mode;		/* AES/TDES CTRL register value */
	dma_addr_t			addr_in;
	dma_addr_t			addr_out;
	/* ops installed by the algorithm implementation / core */
	int (*start)(struct rk_crypto_info *dev);
	int (*update)(struct rk_crypto_info *dev);
	void (*complete)(struct rk_crypto_info *dev, int err);
	int (*enable_clk)(struct rk_crypto_info *dev);
	void (*disable_clk)(struct rk_crypto_info *dev);
	int (*load_data)(struct rk_crypto_info *dev,
			 struct scatterlist *sg_src,
			 struct scatterlist *sg_dst);
	void (*unload_data)(struct rk_crypto_info *dev);
};
197
/* the private variable of cipher (per-tfm context) */
struct rk_cipher_ctx {
	struct rk_crypto_info		*dev;	/* device this tfm is bound to */
	unsigned int			keylen;	/* key length set via ->setkey() */
};
203
/* Pairs a crypto_alg definition with the device it registers against. */
struct rk_crypto_tmp {
	struct rk_crypto_info *dev;
	struct crypto_alg alg;
};
208
209extern struct rk_crypto_tmp rk_ecb_aes_alg;
210extern struct rk_crypto_tmp rk_cbc_aes_alg;
211extern struct rk_crypto_tmp rk_ecb_des_alg;
212extern struct rk_crypto_tmp rk_cbc_des_alg;
213extern struct rk_crypto_tmp rk_ecb_des3_ede_alg;
214extern struct rk_crypto_tmp rk_cbc_des3_ede_alg;
215
216#endif
diff --git a/drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c b/drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c
new file mode 100644
index 000000000000..4a8f9def0c8b
--- /dev/null
+++ b/drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c
@@ -0,0 +1,503 @@
1/*
2 * Crypto acceleration support for Rockchip RK3288
3 *
4 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
5 *
6 * Author: Zain Wang <zain.wang@rock-chips.com>
7 *
8 * This program is free software; you can redistribute it and/or modify it
9 * under the terms and conditions of the GNU General Public License,
10 * version 2, as published by the Free Software Foundation.
11 *
12 * Some ideas are from marvell-cesa.c and s5p-sss.c driver.
13 */
14#include "rk3288_crypto.h"
15
16#define RK_CRYPTO_DEC BIT(0)
17
18static void rk_crypto_complete(struct rk_crypto_info *dev, int err)
19{
20 if (dev->ablk_req->base.complete)
21 dev->ablk_req->base.complete(&dev->ablk_req->base, err);
22}
23
/*
 * Common entry point for all encrypt/decrypt ops: validate alignment,
 * record the request's scatterlists in the device state, enqueue the
 * request and kick the tasklet.
 *
 * NOTE(review): the dev->* fields are written here, before the request
 * is dequeued by the tasklet.  If a previous request is still being
 * processed these writes would clobber its state — verify that request
 * submission is serialized against completion.
 */
static int rk_handle_req(struct rk_crypto_info *dev,
			 struct ablkcipher_request *req)
{
	int err;

	/* Hardware operates on whole blocks only. */
	if (!IS_ALIGNED(req->nbytes, dev->align_size))
		return -EINVAL;

	dev->left_bytes = req->nbytes;
	dev->total = req->nbytes;
	dev->sg_src = req->src;
	dev->first = req->src;
	dev->nents = sg_nents(req->src);
	dev->sg_dst = req->dst;
	dev->aligned = 1;
	dev->ablk_req = req;

	spin_lock(&dev->lock);
	err = ablkcipher_enqueue_request(&dev->queue, req);
	spin_unlock(&dev->lock);
	tasklet_schedule(&dev->crypto_tasklet);
	return err;
}
47
/*
 * Set the AES key: accepts 128/192/256-bit keys and writes the key
 * material straight into the engine's key registers.
 *
 * NOTE(review): writing the key to MMIO at setkey time means a later
 * setkey on another tfm (or a power transition) would clobber it —
 * confirm that only one tfm uses the engine at a time.
 */
static int rk_aes_setkey(struct crypto_ablkcipher *cipher,
			 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256) {
		crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	ctx->keylen = keylen;
	memcpy_toio(ctx->dev->reg + RK_CRYPTO_AES_KEY_0, key, keylen);
	return 0;
}
63
/*
 * Set the DES/3DES key.  For single DES, weak keys are rejected only
 * when the caller requested it via CRYPTO_TFM_REQ_WEAK_KEY.  The key
 * is written straight into the engine's TDES key registers (same
 * caveat as rk_aes_setkey()).
 */
static int rk_tdes_setkey(struct crypto_ablkcipher *cipher,
			  const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 tmp[DES_EXPKEY_WORDS];

	if (keylen != DES_KEY_SIZE && keylen != DES3_EDE_KEY_SIZE) {
		crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	if (keylen == DES_KEY_SIZE) {
		/* des_ekey() returns 0 for weak keys. */
		if (!des_ekey(tmp, key) &&
		    (tfm->crt_flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
			tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
			return -EINVAL;
		}
	}

	ctx->keylen = keylen;
	memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
	return 0;
}
88
89static int rk_aes_ecb_encrypt(struct ablkcipher_request *req)
90{
91 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
92 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
93 struct rk_crypto_info *dev = ctx->dev;
94
95 dev->mode = RK_CRYPTO_AES_ECB_MODE;
96 return rk_handle_req(dev, req);
97}
98
99static int rk_aes_ecb_decrypt(struct ablkcipher_request *req)
100{
101 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
102 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
103 struct rk_crypto_info *dev = ctx->dev;
104
105 dev->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
106 return rk_handle_req(dev, req);
107}
108
109static int rk_aes_cbc_encrypt(struct ablkcipher_request *req)
110{
111 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
112 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
113 struct rk_crypto_info *dev = ctx->dev;
114
115 dev->mode = RK_CRYPTO_AES_CBC_MODE;
116 return rk_handle_req(dev, req);
117}
118
119static int rk_aes_cbc_decrypt(struct ablkcipher_request *req)
120{
121 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
122 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
123 struct rk_crypto_info *dev = ctx->dev;
124
125 dev->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
126 return rk_handle_req(dev, req);
127}
128
129static int rk_des_ecb_encrypt(struct ablkcipher_request *req)
130{
131 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
132 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
133 struct rk_crypto_info *dev = ctx->dev;
134
135 dev->mode = 0;
136 return rk_handle_req(dev, req);
137}
138
139static int rk_des_ecb_decrypt(struct ablkcipher_request *req)
140{
141 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
142 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
143 struct rk_crypto_info *dev = ctx->dev;
144
145 dev->mode = RK_CRYPTO_DEC;
146 return rk_handle_req(dev, req);
147}
148
149static int rk_des_cbc_encrypt(struct ablkcipher_request *req)
150{
151 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
152 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
153 struct rk_crypto_info *dev = ctx->dev;
154
155 dev->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
156 return rk_handle_req(dev, req);
157}
158
159static int rk_des_cbc_decrypt(struct ablkcipher_request *req)
160{
161 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
162 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
163 struct rk_crypto_info *dev = ctx->dev;
164
165 dev->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
166 return rk_handle_req(dev, req);
167}
168
169static int rk_des3_ede_ecb_encrypt(struct ablkcipher_request *req)
170{
171 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
172 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
173 struct rk_crypto_info *dev = ctx->dev;
174
175 dev->mode = RK_CRYPTO_TDES_SELECT;
176 return rk_handle_req(dev, req);
177}
178
179static int rk_des3_ede_ecb_decrypt(struct ablkcipher_request *req)
180{
181 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
182 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
183 struct rk_crypto_info *dev = ctx->dev;
184
185 dev->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
186 return rk_handle_req(dev, req);
187}
188
189static int rk_des3_ede_cbc_encrypt(struct ablkcipher_request *req)
190{
191 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
192 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
193 struct rk_crypto_info *dev = ctx->dev;
194
195 dev->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
196 return rk_handle_req(dev, req);
197}
198
199static int rk_des3_ede_cbc_decrypt(struct ablkcipher_request *req)
200{
201 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
202 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
203 struct rk_crypto_info *dev = ctx->dev;
204
205 dev->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC |
206 RK_CRYPTO_DEC;
207 return rk_handle_req(dev, req);
208}
209
/*
 * Program the engine for the current request: select the AES or TDES
 * path by block size, write the control register (dev->mode plus FIFO
 * and byte-swap flags), load the IV and enable the block-DMA
 * interrupts.  For ECB modes ivsize is 0, so the IV copy is a no-op.
 */
static void rk_ablk_hw_init(struct rk_crypto_info *dev)
{
	struct crypto_ablkcipher *cipher =
		crypto_ablkcipher_reqtfm(dev->ablk_req);
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	u32 ivsize, block, conf_reg = 0;

	block = crypto_tfm_alg_blocksize(tfm);
	ivsize = crypto_ablkcipher_ivsize(cipher);

	if (block == DES_BLOCK_SIZE) {
		/* DES/3DES path. */
		dev->mode |= RK_CRYPTO_TDES_FIFO_MODE |
			     RK_CRYPTO_TDES_BYTESWAP_KEY |
			     RK_CRYPTO_TDES_BYTESWAP_IV;
		CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, dev->mode);
		memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0,
			    dev->ablk_req->info, ivsize);
		conf_reg = RK_CRYPTO_DESSEL;
	} else {
		/* AES path: key size bits depend on the keylen set earlier. */
		dev->mode |= RK_CRYPTO_AES_FIFO_MODE |
			     RK_CRYPTO_AES_KEY_CHANGE |
			     RK_CRYPTO_AES_BYTESWAP_KEY |
			     RK_CRYPTO_AES_BYTESWAP_IV;
		if (ctx->keylen == AES_KEYSIZE_192)
			dev->mode |= RK_CRYPTO_AES_192BIT_key;
		else if (ctx->keylen == AES_KEYSIZE_256)
			dev->mode |= RK_CRYPTO_AES_256BIT_key;
		CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, dev->mode);
		memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0,
			    dev->ablk_req->info, ivsize);
	}
	conf_reg |= RK_CRYPTO_BYTESWAP_BTFIFO |
		    RK_CRYPTO_BYTESWAP_BRFIFO;
	CRYPTO_WRITE(dev, RK_CRYPTO_CONF, conf_reg);
	CRYPTO_WRITE(dev, RK_CRYPTO_INTENA,
		     RK_CRYPTO_BCDMA_ERR_ENA | RK_CRYPTO_BCDMA_DONE_ENA);
}
248
/*
 * Kick off one block-DMA transfer using the addresses/length prepared
 * by rk_load_data().  BRDMAL is written in 32-bit words (count / 4).
 * The _SBF(..., 16) companion write appears to be a write-enable mask
 * for the CTRL low half — confirm against the RK3288 TRM.
 */
static void crypto_dma_start(struct rk_crypto_info *dev)
{
	CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, dev->addr_in);
	CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, dev->count / 4);
	CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, dev->addr_out);
	CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, RK_CRYPTO_BLOCK_START |
		     _SBF(RK_CRYPTO_BLOCK_START, 16));
}
257
258static int rk_set_data_start(struct rk_crypto_info *dev)
259{
260 int err;
261
262 err = dev->load_data(dev, dev->sg_src, dev->sg_dst);
263 if (!err)
264 crypto_dma_start(dev);
265 return err;
266}
267
/*
 * dev->start hook: program the hardware for the dequeued request and
 * start its first chunk, under the device lock so register setup
 * cannot interleave with the IRQ handler.
 */
static int rk_ablk_start(struct rk_crypto_info *dev)
{
	int err;

	spin_lock(&dev->lock);
	rk_ablk_hw_init(dev);
	err = rk_set_data_start(dev);
	spin_unlock(&dev->lock);
	return err;
}
278
279static void rk_iv_copyback(struct rk_crypto_info *dev)
280{
281 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(dev->ablk_req);
282 u32 ivsize = crypto_ablkcipher_ivsize(tfm);
283
284 if (ivsize == DES_BLOCK_SIZE)
285 memcpy_fromio(dev->ablk_req->info,
286 dev->reg + RK_CRYPTO_TDES_IV_0, ivsize);
287 else if (ivsize == AES_BLOCK_SIZE)
288 memcpy_fromio(dev->ablk_req->info,
289 dev->reg + RK_CRYPTO_AES_IV_0, ivsize);
290}
291
/* return:
 *	true	some err was occurred
 *	false	no err, continue
 */
/*
 * dev->update hook, called from the IRQ handler after each chunk
 * completes: unmap the chunk, copy bounce-buffer output back to the
 * destination sg when unaligned, then either start the next chunk or
 * finish the request (IV copy-back + completion).
 */
static int rk_ablk_rx(struct rk_crypto_info *dev)
{
	int err = 0;

	dev->unload_data(dev);
	if (!dev->aligned) {
		/* Copy the in-place bounce result to its place in dst. */
		if (!sg_pcopy_from_buffer(dev->ablk_req->dst, dev->nents,
					  dev->addr_vir, dev->count,
					  dev->total - dev->left_bytes -
					  dev->count)) {
			err = -EINVAL;
			goto out_rx;
		}
	}
	if (dev->left_bytes) {
		if (dev->aligned) {
			/* More data promised but the src list is exhausted. */
			if (sg_is_last(dev->sg_src)) {
				dev_err(dev->dev, "[%s:%d] Lack of data\n",
					__func__, __LINE__);
				err = -ENOMEM;
				goto out_rx;
			}
			dev->sg_src = sg_next(dev->sg_src);
			dev->sg_dst = sg_next(dev->sg_dst);
		}
		err = rk_set_data_start(dev);
	} else {
		rk_iv_copyback(dev);
		/* here show the calculation is over without any err */
		dev->complete(dev, 0);
	}
out_rx:
	return err;
}
330
331static int rk_ablk_cra_init(struct crypto_tfm *tfm)
332{
333 struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
334 struct crypto_alg *alg = tfm->__crt_alg;
335 struct rk_crypto_tmp *algt;
336
337 algt = container_of(alg, struct rk_crypto_tmp, alg);
338
339 ctx->dev = algt->dev;
340 ctx->dev->align_size = crypto_tfm_alg_alignmask(tfm) + 1;
341 ctx->dev->start = rk_ablk_start;
342 ctx->dev->update = rk_ablk_rx;
343 ctx->dev->complete = rk_crypto_complete;
344 ctx->dev->addr_vir = (char *)__get_free_page(GFP_KERNEL);
345
346 return ctx->dev->addr_vir ? ctx->dev->enable_clk(ctx->dev) : -ENOMEM;
347}
348
349static void rk_ablk_cra_exit(struct crypto_tfm *tfm)
350{
351 struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
352
353 free_page((unsigned long)ctx->dev->addr_vir);
354 ctx->dev->disable_clk(ctx->dev);
355}
356
/* ecb(aes): async AES-ECB offload; no IV, 16-byte data alignment. */
struct rk_crypto_tmp rk_ecb_aes_alg = {
	.alg = {
		.cra_name		= "ecb(aes)",
		.cra_driver_name	= "ecb-aes-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x0f,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= rk_aes_setkey,
			.encrypt	= rk_aes_ecb_encrypt,
			.decrypt	= rk_aes_ecb_decrypt,
		}
	}
};
380
/* cbc(aes): async AES-CBC offload; 16-byte IV and data alignment. */
struct rk_crypto_tmp rk_cbc_aes_alg = {
	.alg = {
		.cra_name		= "cbc(aes)",
		.cra_driver_name	= "cbc-aes-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x0f,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= rk_aes_setkey,
			.encrypt	= rk_aes_cbc_encrypt,
			.decrypt	= rk_aes_cbc_decrypt,
		}
	}
};
405
/* ecb(des): async single-DES ECB offload; no IV, 8-byte alignment. */
struct rk_crypto_tmp rk_ecb_des_alg = {
	.alg = {
		.cra_name		= "ecb(des)",
		.cra_driver_name	= "ecb-des-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= DES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x07,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			.setkey		= rk_tdes_setkey,
			.encrypt	= rk_des_ecb_encrypt,
			.decrypt	= rk_des_ecb_decrypt,
		}
	}
};
429
/* cbc(des): async single-DES CBC offload; 8-byte IV and alignment. */
struct rk_crypto_tmp rk_cbc_des_alg = {
	.alg = {
		.cra_name		= "cbc(des)",
		.cra_driver_name	= "cbc-des-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= DES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x07,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			.ivsize		= DES_BLOCK_SIZE,
			.setkey		= rk_tdes_setkey,
			.encrypt	= rk_des_cbc_encrypt,
			.decrypt	= rk_des_cbc_decrypt,
		}
	}
};
454
455struct rk_crypto_tmp rk_ecb_des3_ede_alg = {
456 .alg = {
457 .cra_name = "ecb(des3_ede)",
458 .cra_driver_name = "ecb-des3-ede-rk",
459 .cra_priority = 300,
460 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
461 CRYPTO_ALG_ASYNC,
462 .cra_blocksize = DES_BLOCK_SIZE,
463 .cra_ctxsize = sizeof(struct rk_cipher_ctx),
464 .cra_alignmask = 0x07,
465 .cra_type = &crypto_ablkcipher_type,
466 .cra_module = THIS_MODULE,
467 .cra_init = rk_ablk_cra_init,
468 .cra_exit = rk_ablk_cra_exit,
469 .cra_u.ablkcipher = {
470 .min_keysize = DES3_EDE_KEY_SIZE,
471 .max_keysize = DES3_EDE_KEY_SIZE,
472 .ivsize = DES_BLOCK_SIZE,
473 .setkey = rk_tdes_setkey,
474 .encrypt = rk_des3_ede_ecb_encrypt,
475 .decrypt = rk_des3_ede_ecb_decrypt,
476 }
477 }
478};
479
/* cbc(des3_ede): async 3DES-EDE CBC offload; 8-byte IV and alignment. */
struct rk_crypto_tmp rk_cbc_des3_ede_alg = {
	.alg = {
		.cra_name		= "cbc(des3_ede)",
		.cra_driver_name	= "cbc-des3-ede-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= DES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x07,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= DES3_EDE_KEY_SIZE,
			.max_keysize	= DES3_EDE_KEY_SIZE,
			.ivsize		= DES_BLOCK_SIZE,
			.setkey		= rk_tdes_setkey,
			.encrypt	= rk_des3_ede_cbc_encrypt,
			.decrypt	= rk_des3_ede_cbc_decrypt,
		}
	}
};