author    Andreas Westin <andreas.westin@stericsson.com>  2012-04-30 04:11:17 -0400
committer Herbert Xu <herbert@gondor.apana.org.au>  2012-05-04 05:04:51 -0400
commit    2789c08fffeae270820dda5d096634aecc810af5 (patch)
tree      113e76a6908b4b786374c53f010c472a3d663514 /drivers/crypto
parent    028fdd87b79a45544a2b6902989f2cbbf73a3eac (diff)
crypto: ux500 - Add driver for CRYP hardware
This adds a driver for the ST-Ericsson ux500 crypto hardware module. It supports AES, DES and 3DES; for AES the driver implements the ECB, CBC and CTR modes.

Acked-by: Linus Walleij <linus.walleij@linaro.org>
Signed-off-by: Andreas Westin <andreas.westin@stericsson.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'drivers/crypto')
-rw-r--r--  drivers/crypto/Kconfig                 |   11
-rw-r--r--  drivers/crypto/Makefile                |    1
-rw-r--r--  drivers/crypto/ux500/Kconfig           |   21
-rw-r--r--  drivers/crypto/ux500/Makefile          |    7
-rw-r--r--  drivers/crypto/ux500/cryp/Makefile     |   13
-rw-r--r--  drivers/crypto/ux500/cryp/cryp.c       |  391
-rw-r--r--  drivers/crypto/ux500/cryp/cryp.h       |  308
-rw-r--r--  drivers/crypto/ux500/cryp/cryp_core.c  | 1785
-rw-r--r--  drivers/crypto/ux500/cryp/cryp_irq.c   |   45
-rw-r--r--  drivers/crypto/ux500/cryp/cryp_irq.h   |   31
-rw-r--r--  drivers/crypto/ux500/cryp/cryp_irqp.h  |  125
-rw-r--r--  drivers/crypto/ux500/cryp/cryp_p.h     |  124
12 files changed, 2862 insertions, 0 deletions
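Before the diff itself, a note on how the algorithms this patch registers ("aes", "ecb(aes)", "cbc(aes)", "ctr(aes)", "des" and the 3DES variants) are consumed: through the kernel's ablkcipher API of this era. A minimal, hypothetical in-kernel caller could look like the sketch below; the function and parameter names are illustrative, not part of the patch, and error handling is trimmed.

    #include <linux/crypto.h>
    #include <linux/err.h>
    #include <linux/scatterlist.h>

    /* Hypothetical caller: encrypt len bytes of src into dst with cbc(aes).
     * Resolves to cbc-aes-ux500 when that is the highest-priority cbc(aes). */
    static int example_cbc_aes_encrypt(const u8 *key, unsigned int keylen,
    				   u8 *iv, struct scatterlist *src,
    				   struct scatterlist *dst, unsigned int len)
    {
    	struct crypto_ablkcipher *tfm;
    	struct ablkcipher_request *req;
    	int ret;

    	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
    	if (IS_ERR(tfm))
    		return PTR_ERR(tfm);

    	ret = crypto_ablkcipher_setkey(tfm, key, keylen);
    	if (ret)
    		goto out;

    	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
    	if (!req) {
    		ret = -ENOMEM;
    		goto out;
    	}

    	ablkcipher_request_set_crypt(req, src, dst, len, iv);
    	/* Real callers set a completion callback and handle -EINPROGRESS. */
    	ret = crypto_ablkcipher_encrypt(req);

    	ablkcipher_request_free(req);
    out:
    	crypto_free_ablkcipher(tfm);
    	return ret;
    }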
diff --git a/drivers/crypto/Kconfig b/drivers/crypto/Kconfig
index ab9abb46d01a..69fdf1861100 100644
--- a/drivers/crypto/Kconfig
+++ b/drivers/crypto/Kconfig
@@ -295,4 +295,15 @@ config CRYPTO_DEV_TEGRA_AES
295 To compile this driver as a module, choose M here: the module
296 will be called tegra-aes.
297
298config CRYPTO_DEV_UX500
299 tristate "Driver for ST-Ericsson UX500 crypto hardware acceleration"
300 depends on ARCH_U8500
301 select CRYPTO_ALGAPI
302 help
303 Driver for ST-Ericsson UX500 crypto engine.
304
305if CRYPTO_DEV_UX500
306 source "drivers/crypto/ux500/Kconfig"
307endif # if CRYPTO_DEV_UX500
308
309endif # CRYPTO_HW
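A .config fragment exercising the dependency chain above on a u8500 target might look like this (an illustration, not part of the patch):

    CONFIG_ARCH_U8500=y
    CONFIG_CRYPTO_HW=y
    CONFIG_CRYPTO_DEV_UX500=m
    CONFIG_CRYPTO_DEV_UX500_CRYP=m
    # CONFIG_CRYPTO_DEV_UX500_DEBUG is not set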
diff --git a/drivers/crypto/Makefile b/drivers/crypto/Makefile
index f3e64eadd7af..01390325d72d 100644
--- a/drivers/crypto/Makefile
+++ b/drivers/crypto/Makefile
@@ -14,3 +14,4 @@ obj-$(CONFIG_CRYPTO_DEV_OMAP_AES) += omap-aes.o
14obj-$(CONFIG_CRYPTO_DEV_PICOXCELL) += picoxcell_crypto.o
15obj-$(CONFIG_CRYPTO_DEV_S5P) += s5p-sss.o
16obj-$(CONFIG_CRYPTO_DEV_TEGRA_AES) += tegra-aes.o
17obj-$(CONFIG_CRYPTO_DEV_UX500) += ux500/
\ No newline at end of file
diff --git a/drivers/crypto/ux500/Kconfig b/drivers/crypto/ux500/Kconfig
new file mode 100644
index 000000000000..b893fa061da4
--- /dev/null
+++ b/drivers/crypto/ux500/Kconfig
@@ -0,0 +1,21 @@
1#
2# Copyright (C) ST-Ericsson SA 2010
3# Author: Shujuan Chen (shujuan.chen@stericsson.com)
4# License terms: GNU General Public License (GPL) version 2
5#
6
7config CRYPTO_DEV_UX500_CRYP
8 tristate "UX500 crypto driver for CRYP block"
9 depends on CRYPTO_DEV_UX500
10 select CRYPTO_DES
11 help
12 This selects the crypto driver for the UX500_CRYP hardware. It supports
13 AES-ECB, CBC and CTR with key sizes of 128, 192 and 256 bits.
14
15config CRYPTO_DEV_UX500_DEBUG
16 bool "Activate ux500 platform debug-mode for crypto and hash block"
17 depends on CRYPTO_DEV_UX500_CRYP || CRYPTO_DEV_UX500_HASH
18 default n
19 help
20 Say Y if you want to add debug prints to ux500_hash and
21 ux500_cryp devices.
diff --git a/drivers/crypto/ux500/Makefile b/drivers/crypto/ux500/Makefile
new file mode 100644
index 000000000000..beb4d37db7b4
--- /dev/null
+++ b/drivers/crypto/ux500/Makefile
@@ -0,0 +1,7 @@
1#
2# Copyright (C) ST-Ericsson SA 2010
3# Author: Shujuan Chen (shujuan.chen@stericsson.com)
4# License terms: GNU General Public License (GPL) version 2
5#
6
7obj-$(CONFIG_CRYPTO_DEV_UX500_CRYP) += cryp/
diff --git a/drivers/crypto/ux500/cryp/Makefile b/drivers/crypto/ux500/cryp/Makefile
new file mode 100644
index 000000000000..e5d362a6f680
--- /dev/null
+++ b/drivers/crypto/ux500/cryp/Makefile
@@ -0,0 +1,13 @@
1#/*
2# * Copyright (C) ST-Ericsson SA 2010
3# * Author: shujuan.chen@stericsson.com for ST-Ericsson.
4# * License terms: GNU General Public License (GPL) version 2 */
5
6ifdef CONFIG_CRYPTO_DEV_UX500_DEBUG
7CFLAGS_cryp_core.o := -DDEBUG -O0
8CFLAGS_cryp.o := -DDEBUG -O0
9CFLAGS_cryp_irq.o := -DDEBUG -O0
10endif
11
12obj-$(CONFIG_CRYPTO_DEV_UX500_CRYP) += ux500_cryp.o
13ux500_cryp-objs := cryp.o cryp_irq.o cryp_core.o
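With these Makefiles in place, cryp.o, cryp_irq.o and cryp_core.o are linked into a single ux500_cryp.ko. Building just this directory against an already configured tree uses the usual kbuild idiom (shown for illustration):

    make M=drivers/crypto/ux500 modules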
diff --git a/drivers/crypto/ux500/cryp/cryp.c b/drivers/crypto/ux500/cryp/cryp.c
new file mode 100644
index 000000000000..bd206ffc25df
--- /dev/null
+++ b/drivers/crypto/ux500/cryp/cryp.c
@@ -0,0 +1,391 @@
1/**
2 * Copyright (C) ST-Ericsson SA 2010
3 * Author: Shujuan Chen <shujuan.chen@stericsson.com> for ST-Ericsson.
4 * Author: Jonas Linde <jonas.linde@stericsson.com> for ST-Ericsson.
5 * Author: Niklas Hernaeus <niklas.hernaeus@stericsson.com> for ST-Ericsson.
6 * Author: Joakim Bech <joakim.xx.bech@stericsson.com> for ST-Ericsson.
7 * Author: Berne Hebark <berne.herbark@stericsson.com> for ST-Ericsson.
8 * License terms: GNU General Public License (GPL) version 2
9 */
10
11#include <linux/errno.h>
12#include <linux/kernel.h>
13#include <linux/types.h>
14
15#include <mach/hardware.h>
16
17#include "cryp_p.h"
18#include "cryp.h"
19
20/**
21 * cryp_wait_until_done - wait until the device logic is not busy
22 */
23void cryp_wait_until_done(struct cryp_device_data *device_data)
24{
25 while (cryp_is_logic_busy(device_data))
26 cpu_relax();
27}
28
29/**
30 * cryp_check - This routine checks Peripheral and PCell Id
31 * @device_data: Pointer to the device data struct for base address.
32 */
33int cryp_check(struct cryp_device_data *device_data)
34{
35 int peripheralid2 = 0;
36
37 if (NULL == device_data)
38 return -EINVAL;
39
40 if (cpu_is_u8500())
41 peripheralid2 = CRYP_PERIPHERAL_ID2_DB8500;
42 else if (cpu_is_u5500())
43 peripheralid2 = CRYP_PERIPHERAL_ID2_DB5500;
44
45 /* Check Peripheral and Pcell Id Register for CRYP */
46 if ((CRYP_PERIPHERAL_ID0 ==
47 readl_relaxed(&device_data->base->periphId0))
48 && (CRYP_PERIPHERAL_ID1 ==
49 readl_relaxed(&device_data->base->periphId1))
50 && (peripheralid2 ==
51 readl_relaxed(&device_data->base->periphId2))
52 && (CRYP_PERIPHERAL_ID3 ==
53 readl_relaxed(&device_data->base->periphId3))
54 && (CRYP_PCELL_ID0 ==
55 readl_relaxed(&device_data->base->pcellId0))
56 && (CRYP_PCELL_ID1 ==
57 readl_relaxed(&device_data->base->pcellId1))
58 && (CRYP_PCELL_ID2 ==
59 readl_relaxed(&device_data->base->pcellId2))
60 && (CRYP_PCELL_ID3 ==
61 readl_relaxed(&device_data->base->pcellId3))) {
62 return 0;
63 }
64
65 return -EPERM;
66}
67
68/**
69 * cryp_activity - This routine enables/disables the cryptography function.
70 * @device_data: Pointer to the device data struct for base address.
71 * @cryp_crypen: Enable/Disable functionality
72 */
73void cryp_activity(struct cryp_device_data *device_data,
74 enum cryp_crypen cryp_crypen)
75{
76 CRYP_PUT_BITS(&device_data->base->cr,
77 cryp_crypen,
78 CRYP_CR_CRYPEN_POS,
79 CRYP_CR_CRYPEN_MASK);
80}
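CRYP_PUT_BITS, CRYP_SET_BITS and CRYP_WRITE_BIT come from cryp_p.h, which this patch adds but which is not reproduced in this excerpt. A plausible read-modify-write sketch of one of them, assumed purely for illustration:

    /* Assumed shape: replace the register field selected by mask with val. */
    #define CRYP_PUT_BITS(reg, val, pos, mask)				\
    	writel_relaxed((readl_relaxed(reg) & ~(mask)) |			\
    		       (((u32)(val) << (pos)) & (mask)), (reg))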
81
82/**
83 * cryp_flush_inoutfifo - Resets both the input and the output FIFOs
84 * @device_data: Pointer to the device data struct for base address.
85 */
86void cryp_flush_inoutfifo(struct cryp_device_data *device_data)
87{
88 /*
89 * We always need to disable the hardware before trying to flush the
90 * FIFO. This is something that isn't written in the design
91 * specification, but we have been informed by the hardware designers
92 * that this must be done.
93 */
94 cryp_activity(device_data, CRYP_CRYPEN_DISABLE);
95 cryp_wait_until_done(device_data);
96
97 CRYP_SET_BITS(&device_data->base->cr, CRYP_CR_FFLUSH_MASK);
98 /*
99 * CRYP_SR_INFIFO_READY_MASK is the expected value on the status
100 * register when starting a new calculation, which means Input FIFO is
101 * not full and input FIFO is empty.
102 */
103 while (readl_relaxed(&device_data->base->sr) !=
104 CRYP_SR_INFIFO_READY_MASK)
105 cpu_relax();
106}
107
108/**
109 * cryp_set_configuration - This routine sets the control register of the CRYP IP
110 * @device_data: Pointer to the device data struct for base address.
111 * @cryp_config: Pointer to the configuration parameter
112 * @control_register: The control register to be written later on.
113 */
114int cryp_set_configuration(struct cryp_device_data *device_data,
115 struct cryp_config *cryp_config,
116 u32 *control_register)
117{
118 u32 cr_for_kse;
119
120 if (NULL == device_data || NULL == cryp_config)
121 return -EINVAL;
122
123 *control_register |= (cryp_config->keysize << CRYP_CR_KEYSIZE_POS);
124
125 /* Prepare key for decryption in AES_ECB and AES_CBC mode. */
126 if ((CRYP_ALGORITHM_DECRYPT == cryp_config->algodir) &&
127 ((CRYP_ALGO_AES_ECB == cryp_config->algomode) ||
128 (CRYP_ALGO_AES_CBC == cryp_config->algomode))) {
129 cr_for_kse = *control_register;
130 /*
131 * This seems a bit odd, but it is indeed needed to set this to
132 * encrypt even though it is a decryption that we are doing. It is
133 * also mentioned in the design spec that you need to do this. After
134 * the key preparation for decrypting is done you should set
135 * algodir back to decryption, which is done outside this if
136 * statement.
137 *
138 * According to design specification we should set mode ECB
139 * during key preparation even though we might be running CBC
140 * when entering this function.
141 *
142 * Writing to KSE_ENABLED will drop CRYPEN when key preparation
143 * is done. Therefore we need to set CRYPEN again outside this
144 * if statement when running decryption.
145 */
146 cr_for_kse |= ((CRYP_ALGORITHM_ENCRYPT << CRYP_CR_ALGODIR_POS) |
147 (CRYP_ALGO_AES_ECB << CRYP_CR_ALGOMODE_POS) |
148 (CRYP_CRYPEN_ENABLE << CRYP_CR_CRYPEN_POS) |
149 (KSE_ENABLED << CRYP_CR_KSE_POS));
150
151 writel_relaxed(cr_for_kse, &device_data->base->cr);
152 cryp_wait_until_done(device_data);
153 }
154
155 *control_register |=
156 ((cryp_config->algomode << CRYP_CR_ALGOMODE_POS) |
157 (cryp_config->algodir << CRYP_CR_ALGODIR_POS));
158
159 return 0;
160}
161
162/**
163 * cryp_configure_protection - set the protection bits in the CRYP logic.
164 * @device_data: Pointer to the device data struct for base address.
165 * @p_protect_config: Pointer to the protection mode and
166 * secure mode configuration
167 */
168int cryp_configure_protection(struct cryp_device_data *device_data,
169 struct cryp_protection_config *p_protect_config)
170{
171 if (NULL == p_protect_config)
172 return -EINVAL;
173
174 CRYP_WRITE_BIT(&device_data->base->cr,
175 (u32) p_protect_config->secure_access,
176 CRYP_CR_SECURE_MASK);
177 CRYP_PUT_BITS(&device_data->base->cr,
178 p_protect_config->privilege_access,
179 CRYP_CR_PRLG_POS,
180 CRYP_CR_PRLG_MASK);
181
182 return 0;
183}
184
185/**
186 * cryp_is_logic_busy - returns the busy status of the CRYP logic
187 * @device_data: Pointer to the device data struct for base address.
188 */
189int cryp_is_logic_busy(struct cryp_device_data *device_data)
190{
191 return CRYP_TEST_BITS(&device_data->base->sr,
192 CRYP_SR_BUSY_MASK);
193}
194
195/**
196 * cryp_configure_for_dma - configures the CRYP IP for DMA operation
197 * @device_data: Pointer to the device data struct for base address.
198 * @dma_req: Specifies the DMA request type value.
199 */
200void cryp_configure_for_dma(struct cryp_device_data *device_data,
201 enum cryp_dma_req_type dma_req)
202{
203 CRYP_SET_BITS(&device_data->base->dmacr,
204 (u32) dma_req);
205}
206
207/**
208 * cryp_configure_key_values - configures the key values for CRYP operations
209 * @device_data: Pointer to the device data struct for base address.
210 * @key_reg_index: Key value index register
211 * @key_value: The key value struct
212 */
213int cryp_configure_key_values(struct cryp_device_data *device_data,
214 enum cryp_key_reg_index key_reg_index,
215 struct cryp_key_value key_value)
216{
217 while (cryp_is_logic_busy(device_data))
218 cpu_relax();
219
220 switch (key_reg_index) {
221 case CRYP_KEY_REG_1:
222 writel_relaxed(key_value.key_value_left,
223 &device_data->base->key_1_l);
224 writel_relaxed(key_value.key_value_right,
225 &device_data->base->key_1_r);
226 break;
227 case CRYP_KEY_REG_2:
228 writel_relaxed(key_value.key_value_left,
229 &device_data->base->key_2_l);
230 writel_relaxed(key_value.key_value_right,
231 &device_data->base->key_2_r);
232 break;
233 case CRYP_KEY_REG_3:
234 writel_relaxed(key_value.key_value_left,
235 &device_data->base->key_3_l);
236 writel_relaxed(key_value.key_value_right,
237 &device_data->base->key_3_r);
238 break;
239 case CRYP_KEY_REG_4:
240 writel_relaxed(key_value.key_value_left,
241 &device_data->base->key_4_l);
242 writel_relaxed(key_value.key_value_right,
243 &device_data->base->key_4_r);
244 break;
245 default:
246 return -EINVAL;
247 }
248
249 return 0;
250}
251
252/**
253 * cryp_configure_init_vector - configures the initialization vector register
254 * @device_data: Pointer to the device data struct for base address.
255 * @init_vector_index: Specifies the index of the init vector.
256 * @init_vector_value: Specifies the value for the init vector.
257 */
258int cryp_configure_init_vector(struct cryp_device_data *device_data,
259 enum cryp_init_vector_index
260 init_vector_index,
261 struct cryp_init_vector_value
262 init_vector_value)
263{
264 while (cryp_is_logic_busy(device_data))
265 cpu_relax();
266
267 switch (init_vector_index) {
268 case CRYP_INIT_VECTOR_INDEX_0:
269 writel_relaxed(init_vector_value.init_value_left,
270 &device_data->base->init_vect_0_l);
271 writel_relaxed(init_vector_value.init_value_right,
272 &device_data->base->init_vect_0_r);
273 break;
274 case CRYP_INIT_VECTOR_INDEX_1:
275 writel_relaxed(init_vector_value.init_value_left,
276 &device_data->base->init_vect_1_l);
277 writel_relaxed(init_vector_value.init_value_right,
278 &device_data->base->init_vect_1_r);
279 break;
280 default:
281 return -EINVAL;
282 }
283
284 return 0;
285}
286
287/**
288 * cryp_save_device_context - Store hardware registers and
289 * other device context parameter
290 * @device_data: Pointer to the device data struct for base address.
291 * @ctx: Crypto device context
292 */
293void cryp_save_device_context(struct cryp_device_data *device_data,
294 struct cryp_device_context *ctx,
295 int cryp_mode)
296{
297 enum cryp_algo_mode algomode;
298 struct cryp_register *src_reg = device_data->base;
299 struct cryp_config *config =
300 (struct cryp_config *)device_data->current_ctx;
301
302 /*
303 * Always start by disabling the hardware and wait for it to finish the
304 * ongoing calculations before trying to reprogram it.
305 */
306 cryp_activity(device_data, CRYP_CRYPEN_DISABLE);
307 cryp_wait_until_done(device_data);
308
309 if (cryp_mode == CRYP_MODE_DMA)
310 cryp_configure_for_dma(device_data, CRYP_DMA_DISABLE_BOTH);
311
312 if (CRYP_TEST_BITS(&src_reg->sr, CRYP_SR_IFEM_MASK) == 0)
313 ctx->din = readl_relaxed(&src_reg->din);
314
315 ctx->cr = readl_relaxed(&src_reg->cr) & CRYP_CR_CONTEXT_SAVE_MASK;
316
317 switch (config->keysize) {
318 case CRYP_KEY_SIZE_256:
319 ctx->key_4_l = readl_relaxed(&src_reg->key_4_l);
320 ctx->key_4_r = readl_relaxed(&src_reg->key_4_r);
321
322 case CRYP_KEY_SIZE_192:
323 ctx->key_3_l = readl_relaxed(&src_reg->key_3_l);
324 ctx->key_3_r = readl_relaxed(&src_reg->key_3_r);
325
326 case CRYP_KEY_SIZE_128:
327 ctx->key_2_l = readl_relaxed(&src_reg->key_2_l);
328 ctx->key_2_r = readl_relaxed(&src_reg->key_2_r);
329
330 default:
331 ctx->key_1_l = readl_relaxed(&src_reg->key_1_l);
332 ctx->key_1_r = readl_relaxed(&src_reg->key_1_r);
333 }
334
335 /* Save IV for CBC mode for both AES and DES. */
336 algomode = ((ctx->cr & CRYP_CR_ALGOMODE_MASK) >> CRYP_CR_ALGOMODE_POS);
337 if (algomode == CRYP_ALGO_TDES_CBC ||
338 algomode == CRYP_ALGO_DES_CBC ||
339 algomode == CRYP_ALGO_AES_CBC) {
340 ctx->init_vect_0_l = readl_relaxed(&src_reg->init_vect_0_l);
341 ctx->init_vect_0_r = readl_relaxed(&src_reg->init_vect_0_r);
342 ctx->init_vect_1_l = readl_relaxed(&src_reg->init_vect_1_l);
343 ctx->init_vect_1_r = readl_relaxed(&src_reg->init_vect_1_r);
344 }
345}
346
347/**
348 * cryp_restore_device_context - Restore hardware registers and
349 * other device context parameter
350 * @device_data: Pointer to the device data struct for base address.
351 * @ctx: Crypto device context
352 */
353void cryp_restore_device_context(struct cryp_device_data *device_data,
354 struct cryp_device_context *ctx)
355{
356 struct cryp_register *reg = device_data->base;
357 struct cryp_config *config =
358 (struct cryp_config *)device_data->current_ctx;
359
360 /*
361 * Fall through for all items in switch statement. DES is captured in
362 * the default.
363 */
364 switch (config->keysize) {
365 case CRYP_KEY_SIZE_256:
366 writel_relaxed(ctx->key_4_l, &reg->key_4_l);
367 writel_relaxed(ctx->key_4_r, &reg->key_4_r);
368
369 case CRYP_KEY_SIZE_192:
370 writel_relaxed(ctx->key_3_l, &reg->key_3_l);
371 writel_relaxed(ctx->key_3_r, &reg->key_3_r);
372
373 case CRYP_KEY_SIZE_128:
374 writel_relaxed(ctx->key_2_l, &reg->key_2_l);
375 writel_relaxed(ctx->key_2_r, &reg->key_2_r);
376
377 default:
378 writel_relaxed(ctx->key_1_l, &reg->key_1_l);
379 writel_relaxed(ctx->key_1_r, &reg->key_1_r);
380 }
381
382 /* Restore IV for CBC mode for AES and DES. */
383 if (config->algomode == CRYP_ALGO_TDES_CBC ||
384 config->algomode == CRYP_ALGO_DES_CBC ||
385 config->algomode == CRYP_ALGO_AES_CBC) {
386 writel_relaxed(ctx->init_vect_0_l, &reg->init_vect_0_l);
387 writel_relaxed(ctx->init_vect_0_r, &reg->init_vect_0_r);
388 writel_relaxed(ctx->init_vect_1_l, &reg->init_vect_1_l);
389 writel_relaxed(ctx->init_vect_1_r, &reg->init_vect_1_r);
390 }
391}
diff --git a/drivers/crypto/ux500/cryp/cryp.h b/drivers/crypto/ux500/cryp/cryp.h
new file mode 100644
index 000000000000..14cfd05b777a
--- /dev/null
+++ b/drivers/crypto/ux500/cryp/cryp.h
@@ -0,0 +1,308 @@
1/**
2 * Copyright (C) ST-Ericsson SA 2010
3 * Author: Shujuan Chen <shujuan.chen@stericsson.com> for ST-Ericsson.
4 * Author: Jonas Linde <jonas.linde@stericsson.com> for ST-Ericsson.
5 * Author: Joakim Bech <joakim.xx.bech@stericsson.com> for ST-Ericsson.
6 * Author: Berne Hebark <berne.herbark@stericsson.com> for ST-Ericsson.
7 * Author: Niklas Hernaeus <niklas.hernaeus@stericsson.com> for ST-Ericsson.
8 * License terms: GNU General Public License (GPL) version 2
9 */
10
11#ifndef _CRYP_H_
12#define _CRYP_H_
13
14#include <linux/completion.h>
15#include <linux/dmaengine.h>
16#include <linux/klist.h>
17#include <linux/mutex.h>
18
19#define DEV_DBG_NAME "crypX crypX:"
20
21/* CRYP enable/disable */
22enum cryp_crypen {
23 CRYP_CRYPEN_DISABLE = 0,
24 CRYP_CRYPEN_ENABLE = 1
25};
26
27/* CRYP Start Computation enable/disable */
28enum cryp_start {
29 CRYP_START_DISABLE = 0,
30 CRYP_START_ENABLE = 1
31};
32
33/* CRYP Init Signal enable/disable */
34enum cryp_init {
35 CRYP_INIT_DISABLE = 0,
36 CRYP_INIT_ENABLE = 1
37};
38
39/* Cryp State enable/disable */
40enum cryp_state {
41 CRYP_STATE_DISABLE = 0,
42 CRYP_STATE_ENABLE = 1
43};
44
45/* Key preparation bit enable */
46enum cryp_key_prep {
47 KSE_DISABLED = 0,
48 KSE_ENABLED = 1
49};
50
51/* Key size for AES */
52#define CRYP_KEY_SIZE_128 (0)
53#define CRYP_KEY_SIZE_192 (1)
54#define CRYP_KEY_SIZE_256 (2)
55
56/* CRYP algorithm modes */
57enum cryp_algo_mode {
58 CRYP_ALGO_TDES_ECB,
59 CRYP_ALGO_TDES_CBC,
60 CRYP_ALGO_DES_ECB,
61 CRYP_ALGO_DES_CBC,
62 CRYP_ALGO_AES_ECB,
63 CRYP_ALGO_AES_CBC,
64 CRYP_ALGO_AES_CTR,
65 CRYP_ALGO_AES_XTS
66};
67
68/* Cryp Encryption or Decryption */
69enum cryp_algorithm_dir {
70 CRYP_ALGORITHM_ENCRYPT,
71 CRYP_ALGORITHM_DECRYPT
72};
73
74/* Hardware access method */
75enum cryp_mode {
76 CRYP_MODE_POLLING,
77 CRYP_MODE_INTERRUPT,
78 CRYP_MODE_DMA
79};
80
81/**
82 * struct cryp_config -
83 * @keysize: Key size for AES
84 * @algomode: AES modes
85 * @algodir: Cryp Encryption or Decryption
86 *
87 * CRYP configuration structure to be passed to set configuration
88 */
89struct cryp_config {
90 int keysize;
91 enum cryp_algo_mode algomode;
92 enum cryp_algorithm_dir algodir;
93};
94
95/**
96 * struct cryp_protection_config -
97 * @privilege_access: Privileged cryp state enable/disable
98 * @secure_access: Secure cryp state enable/disable
99 *
100 * Protection configuration structure for setting privilege access
101 */
102struct cryp_protection_config {
103 enum cryp_state privilege_access;
104 enum cryp_state secure_access;
105};
106
107/* Cryp status */
108enum cryp_status_id {
109 CRYP_STATUS_BUSY = 0x10,
110 CRYP_STATUS_OUTPUT_FIFO_FULL = 0x08,
111 CRYP_STATUS_OUTPUT_FIFO_NOT_EMPTY = 0x04,
112 CRYP_STATUS_INPUT_FIFO_NOT_FULL = 0x02,
113 CRYP_STATUS_INPUT_FIFO_EMPTY = 0x01
114};
115
116/* Cryp DMA interface */
117enum cryp_dma_req_type {
118 CRYP_DMA_DISABLE_BOTH,
119 CRYP_DMA_ENABLE_IN_DATA,
120 CRYP_DMA_ENABLE_OUT_DATA,
121 CRYP_DMA_ENABLE_BOTH_DIRECTIONS
122};
123
124enum cryp_dma_channel {
125 CRYP_DMA_RX = 0,
126 CRYP_DMA_TX
127};
128
129/* Key registers */
130enum cryp_key_reg_index {
131 CRYP_KEY_REG_1,
132 CRYP_KEY_REG_2,
133 CRYP_KEY_REG_3,
134 CRYP_KEY_REG_4
135};
136
137/* Key register left and right */
138struct cryp_key_value {
139 u32 key_value_left;
140 u32 key_value_right;
141};
142
143/* Cryp Initialization structure */
144enum cryp_init_vector_index {
145 CRYP_INIT_VECTOR_INDEX_0,
146 CRYP_INIT_VECTOR_INDEX_1
147};
148
149/* struct cryp_init_vector_value -
150 * @init_value_left: Init value left.
151 * @init_value_right: Init value right.
152 */
153struct cryp_init_vector_value {
154 u32 init_value_left;
155 u32 init_value_right;
156};
157
158/**
159 * struct cryp_device_context - structure for a cryp context.
160 * @cr: control register
161 * @dmacr: DMA control register
162 * @imsc: Interrupt mask set/clear register
163 * @key_1_l: Key 1l register
164 * @key_1_r: Key 1r register
165 * @key_2_l: Key 2l register
166 * @key_2_r: Key 2r register
167 * @key_3_l: Key 3l register
168 * @key_3_r: Key 3r register
169 * @key_4_l: Key 4l register
170 * @key_4_r: Key 4r register
171 * @init_vect_0_l: Initialization vector 0l register
172 * @init_vect_0_r: Initialization vector 0r register
173 * @init_vect_1_l: Initialization vector 1l register
174 * @init_vect_1_r: Initialization vector 1r register
175 * @din: Data in register
176 * @dout: Data out register
177 *
178 * CRYP power management specific structure.
179 */
180struct cryp_device_context {
181 u32 cr;
182 u32 dmacr;
183 u32 imsc;
184
185 u32 key_1_l;
186 u32 key_1_r;
187 u32 key_2_l;
188 u32 key_2_r;
189 u32 key_3_l;
190 u32 key_3_r;
191 u32 key_4_l;
192 u32 key_4_r;
193
194 u32 init_vect_0_l;
195 u32 init_vect_0_r;
196 u32 init_vect_1_l;
197 u32 init_vect_1_r;
198
199 u32 din;
200 u32 dout;
201};
202
203struct cryp_dma {
204 dma_cap_mask_t mask;
205 struct completion cryp_dma_complete;
206 struct dma_chan *chan_cryp2mem;
207 struct dma_chan *chan_mem2cryp;
208 struct stedma40_chan_cfg *cfg_cryp2mem;
209 struct stedma40_chan_cfg *cfg_mem2cryp;
210 int sg_src_len;
211 int sg_dst_len;
212 struct scatterlist *sg_src;
213 struct scatterlist *sg_dst;
214 int nents_src;
215 int nents_dst;
216};
217
218/**
219 * struct cryp_device_data - structure for a cryp device.
220 * @base: Pointer to the hardware base address.
221 * @dev: Pointer to the devices dev structure.
222 * @clk: Pointer to the device's clock control.
223 * @pwr_regulator: Pointer to the device's power control.
224 * @power_status: Current status of the power.
225 * @ctx_lock: Lock for current_ctx.
226 * @current_ctx: Pointer to the currently allocated context.
227 * @list_node: For inclusion into a klist.
228 * @dma: The dma structure holding channel configuration.
229 * @power_state: TRUE = power state on, FALSE = power state off.
230 * @power_state_spinlock: Spinlock for power_state.
231 * @restore_dev_ctx: TRUE = saved ctx, FALSE = no saved ctx.
232 */
233struct cryp_device_data {
234 struct cryp_register __iomem *base;
235 struct device *dev;
236 struct clk *clk;
237 struct regulator *pwr_regulator;
238 int power_status;
239 struct spinlock ctx_lock;
240 struct cryp_ctx *current_ctx;
241 struct klist_node list_node;
242 struct cryp_dma dma;
243 bool power_state;
244 struct spinlock power_state_spinlock;
245 bool restore_dev_ctx;
246};
247
248void cryp_wait_until_done(struct cryp_device_data *device_data);
249
250/* Initialization functions */
251
252int cryp_check(struct cryp_device_data *device_data);
253
254void cryp_activity(struct cryp_device_data *device_data,
255 enum cryp_crypen cryp_crypen);
256
257void cryp_flush_inoutfifo(struct cryp_device_data *device_data);
258
259int cryp_set_configuration(struct cryp_device_data *device_data,
260 struct cryp_config *cryp_config,
261 u32 *control_register);
262
263void cryp_configure_for_dma(struct cryp_device_data *device_data,
264 enum cryp_dma_req_type dma_req);
265
266int cryp_configure_key_values(struct cryp_device_data *device_data,
267 enum cryp_key_reg_index key_reg_index,
268 struct cryp_key_value key_value);
269
270int cryp_configure_init_vector(struct cryp_device_data *device_data,
271 enum cryp_init_vector_index
272 init_vector_index,
273 struct cryp_init_vector_value
274 init_vector_value);
275
276int cryp_configure_protection(struct cryp_device_data *device_data,
277 struct cryp_protection_config *p_protect_config);
278
279/* Power management functions */
280void cryp_save_device_context(struct cryp_device_data *device_data,
281 struct cryp_device_context *ctx,
282 int cryp_mode);
283
284void cryp_restore_device_context(struct cryp_device_data *device_data,
285 struct cryp_device_context *ctx);
286
287/* Data transfer and status bits. */
288int cryp_is_logic_busy(struct cryp_device_data *device_data);
289
290int cryp_get_status(struct cryp_device_data *device_data);
291
292/**
293 * cryp_write_indata - This routine writes 32 bit data into the data input
294 * register of the cryptography IP.
295 * @device_data: Pointer to the device data struct for base address.
296 * @write_data: Data to write.
297 */
298int cryp_write_indata(struct cryp_device_data *device_data, u32 write_data);
299
300/**
301 * cryp_read_outdata - This routine reads the data from the data output
302 * register of the CRYP logic
303 * @device_data: Pointer to the device data struct for base address.
304 * @read_data: Read the data from the output FIFO.
305 */
306int cryp_read_outdata(struct cryp_device_data *device_data, u32 *read_data);
307
308#endif /* _CRYP_H_ */
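Taken together, the prototypes above imply a programming order for one operation. A condensed sketch of how the next file, cryp_core.c, drives them (simplified from cryp_setup_context() below; error checking and the real control-register defaults are dropped, and the function name is illustrative):

    static void example_program_cryp(struct cryp_device_data *device_data,
    				 struct cryp_config *config,
    				 struct cryp_key_value key,
    				 struct cryp_init_vector_value iv)
    {
    	u32 cr = 0;

    	cryp_flush_inoutfifo(device_data);
    	cryp_configure_key_values(device_data, CRYP_KEY_REG_1, key);
    	cryp_configure_init_vector(device_data, CRYP_INIT_VECTOR_INDEX_0, iv);
    	cryp_set_configuration(device_data, config, &cr);

    	/* cryp_core.c ORs in CRYPEN when it finally writes cr. */
    	writel(cr | (CRYP_CRYPEN_ENABLE << CRYP_CR_CRYPEN_POS),
    	       &device_data->base->cr);
    }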
diff --git a/drivers/crypto/ux500/cryp/cryp_core.c b/drivers/crypto/ux500/cryp/cryp_core.c
new file mode 100644
index 000000000000..7051e000163b
--- /dev/null
+++ b/drivers/crypto/ux500/cryp/cryp_core.c
@@ -0,0 +1,1785 @@
1/**
2 * Copyright (C) ST-Ericsson SA 2010
3 * Author: Shujuan Chen <shujuan.chen@stericsson.com> for ST-Ericsson.
4 * Author: Joakim Bech <joakim.xx.bech@stericsson.com> for ST-Ericsson.
5 * Author: Berne Hebark <berne.herbark@stericsson.com> for ST-Ericsson.
6 * Author: Niklas Hernaeus <niklas.hernaeus@stericsson.com> for ST-Ericsson.
7 * Author: Jonas Linde <jonas.linde@stericsson.com> for ST-Ericsson.
8 * Author: Andreas Westin <andreas.westin@stericsson.com> for ST-Ericsson.
9 * License terms: GNU General Public License (GPL) version 2
10 */
11
12#include <linux/clk.h>
13#include <linux/completion.h>
14#include <linux/crypto.h>
15#include <linux/dmaengine.h>
16#include <linux/err.h>
17#include <linux/errno.h>
18#include <linux/interrupt.h>
19#include <linux/io.h>
20#include <linux/irqreturn.h>
21#include <linux/klist.h>
22#include <linux/module.h>
23#include <linux/platform_device.h>
24#include <linux/regulator/consumer.h>
25#include <linux/semaphore.h>
26
27#include <crypto/aes.h>
28#include <crypto/algapi.h>
29#include <crypto/ctr.h>
30#include <crypto/des.h>
31#include <crypto/scatterwalk.h>
32
33#include <plat/ste_dma40.h>
34
35#include <mach/crypto-ux500.h>
36#include <mach/hardware.h>
37
38#include "cryp_p.h"
39#include "cryp.h"
40
41#define CRYP_MAX_KEY_SIZE 32
42#define BYTES_PER_WORD 4
43
44static int cryp_mode;
45static atomic_t session_id;
46
47static struct stedma40_chan_cfg *mem_to_engine;
48static struct stedma40_chan_cfg *engine_to_mem;
49
50/**
51 * struct cryp_driver_data - data specific to the driver.
52 *
53 * @device_list: A list of registered devices to choose from.
54 * @device_allocation: A semaphore initialized with number of devices.
55 */
56struct cryp_driver_data {
57 struct klist device_list;
58 struct semaphore device_allocation;
59};
60
61/**
62 * struct cryp_ctx - Crypto context
63 * @config: Crypto mode.
64 * @key[CRYP_MAX_KEY_SIZE]: Key.
65 * @keylen: Length of key.
66 * @iv: Pointer to initialization vector.
67 * @indata: Pointer to indata.
68 * @outdata: Pointer to outdata.
69 * @datalen: Length of indata.
70 * @outlen: Length of outdata.
71 * @blocksize: Size of blocks.
72 * @updated: Updated flag.
73 * @dev_ctx: Device dependent context.
74 * @device: Pointer to the device.
75 */
76struct cryp_ctx {
77 struct cryp_config config;
78 u8 key[CRYP_MAX_KEY_SIZE];
79 u32 keylen;
80 u8 *iv;
81 const u8 *indata;
82 u8 *outdata;
83 u32 datalen;
84 u32 outlen;
85 u32 blocksize;
86 u8 updated;
87 struct cryp_device_context dev_ctx;
88 struct cryp_device_data *device;
89 u32 session_id;
90};
91
92static struct cryp_driver_data driver_data;
93
94/**
95 * uint8p_to_uint32_be - 4*uint8 to uint32 big endian
96 * @in: Data to convert.
97 */
98static inline u32 uint8p_to_uint32_be(u8 *in)
99{
100 u32 *data = (u32 *)in;
101
102 return cpu_to_be32p(data);
103}
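A concrete trace of this conversion, assuming a little-endian CPU (as on ux500):

    /* in[] = {0x12, 0x34, 0x56, 0x78}
     * *(u32 *)in      == 0x78563412   (little-endian load)
     * cpu_to_be32p()  -> 0x12345678   (first input byte lands in the MSB)
     */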
104
105/**
106 * swap_bits_in_byte - mirror the bits in a byte
107 * @b: the byte to be mirrored
108 *
109 * The bits are swapped the following way:
110 * Byte b include bits 0-7, nibble 1 (n1) include bits 0-3 and
111 * nibble 2 (n2) bits 4-7.
112 *
113 * Nibble 1 (n1):
114 * (The "old" (moved) bit is replaced with a zero)
115 * 1. Move bit 6 and 7, 4 positions to the left.
116 * 2. Move bit 3 and 5, 2 positions to the left.
117 * 3. Move bit 1-4, 1 position to the left.
118 *
119 * Nibble 2 (n2):
120 * 1. Move bit 0 and 1, 4 positions to the right.
121 * 2. Move bit 2 and 4, 2 positions to the right.
122 * 3. Move bit 3-6, 1 position to the right.
123 *
124 * Combine the two nibbles to a complete and swapped byte.
125 */
126
127static inline u8 swap_bits_in_byte(u8 b)
128{
129#define R_SHIFT_4_MASK 0xc0 /* Bits 6 and 7, right shift 4 */
130#define R_SHIFT_2_MASK 0x28 /* (After right shift 4) Bits 3 and 5,
131 right shift 2 */
132#define R_SHIFT_1_MASK 0x1e /* (After right shift 2) Bits 1-4,
133 right shift 1 */
134#define L_SHIFT_4_MASK 0x03 /* Bits 0 and 1, left shift 4 */
135#define L_SHIFT_2_MASK 0x14 /* (After left shift 4) Bits 2 and 4,
136 left shift 2 */
137#define L_SHIFT_1_MASK 0x78 /* (After left shift 1) Bits 3-6,
138 left shift 1 */
139
140 u8 n1;
141 u8 n2;
142
143 /* Swap most significant nibble */
144 /* Right shift 4, bits 6 and 7 */
145 n1 = ((b & R_SHIFT_4_MASK) >> 4) | (b & ~(R_SHIFT_4_MASK >> 4));
146 /* Right shift 2, bits 3 and 5 */
147 n1 = ((n1 & R_SHIFT_2_MASK) >> 2) | (n1 & ~(R_SHIFT_2_MASK >> 2));
148 /* Right shift 1, bits 1-4 */
149 n1 = (n1 & R_SHIFT_1_MASK) >> 1;
150
151 /* Swap least significant nibble */
152 /* Left shift 4, bits 0 and 1 */
153 n2 = ((b & L_SHIFT_4_MASK) << 4) | (b & ~(L_SHIFT_4_MASK << 4));
154 /* Left shift 2, bits 2 and 4 */
155 n2 = ((n2 & L_SHIFT_2_MASK) << 2) | (n2 & ~(L_SHIFT_2_MASK << 2));
156 /* Left shift 1, bits 3-6 */
157 n2 = (n2 & L_SHIFT_1_MASK) << 1;
158
159 return n1 | n2;
160}
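The mask ladder above is a branch-free bit mirror. A plain loop computing the same result is handy for sanity checking; a reference sketch, not part of the patch:

    static inline u8 swap_bits_in_byte_ref(u8 b)
    {
    	u8 r = 0;
    	int i;

    	/* Bit i of the input becomes bit (7 - i) of the output. */
    	for (i = 0; i < 8; i++)
    		if (b & (1 << i))
    			r |= 1 << (7 - i);

    	return r;
    }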
161
162static inline void swap_words_in_key_and_bits_in_byte(const u8 *in,
163 u8 *out, u32 len)
164{
165 unsigned int i = 0;
166 int j;
167 int index = 0;
168
169 j = len - BYTES_PER_WORD;
170 while (j >= 0) {
171 for (i = 0; i < BYTES_PER_WORD; i++) {
172 index = len - j - BYTES_PER_WORD + i;
173 out[j + i] =
174 swap_bits_in_byte(in[index]);
175 }
176 j -= BYTES_PER_WORD;
177 }
178}
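Worked out for a 16-byte AES key, the loop reverses the order of the four 32-bit words while swap_bits_in_byte() mirrors every byte:

    /* out[0..3]   = mirror(in[12..15])
     * out[4..7]   = mirror(in[8..11])
     * out[8..11]  = mirror(in[4..7])
     * out[12..15] = mirror(in[0..3])
     */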
179
180static void add_session_id(struct cryp_ctx *ctx)
181{
182 /*
183 * We never want 0 to be a valid value, since it is the default value
184 * of the software context; if the counter wraps to 0 we bump it again.
185 */
186 if (unlikely(atomic_inc_and_test(&session_id)))
187 atomic_inc(&session_id);
188
189 ctx->session_id = atomic_read(&session_id);
190}
191
192static irqreturn_t cryp_interrupt_handler(int irq, void *param)
193{
194 struct cryp_ctx *ctx;
195 int i;
196 struct cryp_device_data *device_data;
197
198 if (param == NULL) {
199 BUG_ON(!param);
200 return IRQ_HANDLED;
201 }
202
203 /* The device is coming from the one found in hw_crypt_noxts. */
204 device_data = (struct cryp_device_data *)param;
205
206 ctx = device_data->current_ctx;
207
208 if (ctx == NULL) {
209 BUG_ON(!ctx);
210 return IRQ_HANDLED;
211 }
212
213 dev_dbg(ctx->device->dev, "[%s] (len: %d) %s, ", __func__, ctx->outlen,
214 cryp_pending_irq_src(device_data, CRYP_IRQ_SRC_OUTPUT_FIFO) ?
215 "out" : "in");
216
217 if (cryp_pending_irq_src(device_data,
218 CRYP_IRQ_SRC_OUTPUT_FIFO)) {
219 if (ctx->outlen / ctx->blocksize > 0) {
220 for (i = 0; i < ctx->blocksize / 4; i++) {
221 *(u32 *)ctx->outdata = readl_relaxed(
222 &device_data->base->dout);
223 ctx->outdata += 4;
224 ctx->outlen -= 4;
225 }
226
227 if (ctx->outlen == 0) {
228 cryp_disable_irq_src(device_data,
229 CRYP_IRQ_SRC_OUTPUT_FIFO);
230 }
231 }
232 } else if (cryp_pending_irq_src(device_data,
233 CRYP_IRQ_SRC_INPUT_FIFO)) {
234 if (ctx->datalen / ctx->blocksize > 0) {
235 for (i = 0 ; i < ctx->blocksize / 4; i++) {
236 writel_relaxed(*(const u32 *)ctx->indata,
237 &device_data->base->din);
238 ctx->indata += 4;
239 ctx->datalen -= 4;
240 }
241
242 if (ctx->datalen == 0)
243 cryp_disable_irq_src(device_data,
244 CRYP_IRQ_SRC_INPUT_FIFO);
245
246 if (ctx->config.algomode == CRYP_ALGO_AES_XTS) {
247 CRYP_PUT_BITS(&device_data->base->cr,
248 CRYP_START_ENABLE,
249 CRYP_CR_START_POS,
250 CRYP_CR_START_MASK);
251
252 cryp_wait_until_done(device_data);
253 }
254 }
255 }
256
257 return IRQ_HANDLED;
258}
259
260static int mode_is_aes(enum cryp_algo_mode mode)
261{
262 return CRYP_ALGO_AES_ECB == mode ||
263 CRYP_ALGO_AES_CBC == mode ||
264 CRYP_ALGO_AES_CTR == mode ||
265 CRYP_ALGO_AES_XTS == mode;
266}
267
268static int cfg_iv(struct cryp_device_data *device_data, u32 left, u32 right,
269 enum cryp_init_vector_index index)
270{
271 struct cryp_init_vector_value vector_value;
272
273 dev_dbg(device_data->dev, "[%s]", __func__);
274
275 vector_value.init_value_left = left;
276 vector_value.init_value_right = right;
277
278 return cryp_configure_init_vector(device_data,
279 index,
280 vector_value);
281}
282
283static int cfg_ivs(struct cryp_device_data *device_data, struct cryp_ctx *ctx)
284{
285 int i;
286 int status = 0;
287 int num_of_regs = ctx->blocksize / 8;
288 u32 iv[AES_BLOCK_SIZE / 4];
289
290 dev_dbg(device_data->dev, "[%s]", __func__);
291
292 /*
293 * Since we loop over num_of_regs we need a check in case someone
294 * provides an incorrect blocksize, which would make us call cfg_iv
295 * with an out-of-range index; that is an error.
296 */
297 if (num_of_regs > 2) {
298 dev_err(device_data->dev, "[%s] Incorrect blocksize %d",
299 __func__, ctx->blocksize);
300 return -EINVAL;
301 }
302
303 for (i = 0; i < ctx->blocksize / 4; i++)
304 iv[i] = uint8p_to_uint32_be(ctx->iv + i*4);
305
306 for (i = 0; i < num_of_regs; i++) {
307 status = cfg_iv(device_data, iv[i*2], iv[i*2+1],
308 (enum cryp_init_vector_index) i);
309 if (status != 0)
310 return status;
311 }
312 return status;
313}
314
315static int set_key(struct cryp_device_data *device_data,
316 u32 left_key,
317 u32 right_key,
318 enum cryp_key_reg_index index)
319{
320 struct cryp_key_value key_value;
321 int cryp_error;
322
323 dev_dbg(device_data->dev, "[%s]", __func__);
324
325 key_value.key_value_left = left_key;
326 key_value.key_value_right = right_key;
327
328 cryp_error = cryp_configure_key_values(device_data,
329 index,
330 key_value);
331 if (cryp_error != 0)
332 dev_err(device_data->dev, "[%s]: "
333 "cryp_configure_key_values() failed!", __func__);
334
335 return cryp_error;
336}
337
338static int cfg_keys(struct cryp_ctx *ctx)
339{
340 int i;
341 int num_of_regs = ctx->keylen / 8;
342 u32 swapped_key[CRYP_MAX_KEY_SIZE / 4];
343 int cryp_error = 0;
344
345 dev_dbg(ctx->device->dev, "[%s]", __func__);
346
347 if (mode_is_aes(ctx->config.algomode)) {
348 swap_words_in_key_and_bits_in_byte((u8 *)ctx->key,
349 (u8 *)swapped_key,
350 ctx->keylen);
351 } else {
352 for (i = 0; i < ctx->keylen / 4; i++)
353 swapped_key[i] = uint8p_to_uint32_be(ctx->key + i*4);
354 }
355
356 for (i = 0; i < num_of_regs; i++) {
357 cryp_error = set_key(ctx->device,
358 *(((u32 *)swapped_key)+i*2),
359 *(((u32 *)swapped_key)+i*2+1),
360 (enum cryp_key_reg_index) i);
361
362 if (cryp_error != 0) {
363 dev_err(ctx->device->dev, "[%s]: set_key() failed!",
364 __func__);
365 return cryp_error;
366 }
367 }
368 return cryp_error;
369}
370
371static int cryp_setup_context(struct cryp_ctx *ctx,
372 struct cryp_device_data *device_data)
373{
374 u32 control_register = CRYP_CR_DEFAULT;
375
376 switch (cryp_mode) {
377 case CRYP_MODE_INTERRUPT:
378 writel_relaxed(CRYP_IMSC_DEFAULT, &device_data->base->imsc);
379 break;
380
381 case CRYP_MODE_DMA:
382 writel_relaxed(CRYP_DMACR_DEFAULT, &device_data->base->dmacr);
383 break;
384
385 default:
386 break;
387 }
388
389 if (ctx->updated == 0) {
390 cryp_flush_inoutfifo(device_data);
391 if (cfg_keys(ctx) != 0) {
392 dev_err(ctx->device->dev, "[%s]: cfg_keys failed!",
393 __func__);
394 return -EINVAL;
395 }
396
397 if (ctx->iv &&
398 CRYP_ALGO_AES_ECB != ctx->config.algomode &&
399 CRYP_ALGO_DES_ECB != ctx->config.algomode &&
400 CRYP_ALGO_TDES_ECB != ctx->config.algomode) {
401 if (cfg_ivs(device_data, ctx) != 0)
402 return -EPERM;
403 }
404
405 cryp_set_configuration(device_data, &ctx->config,
406 &control_register);
407 add_session_id(ctx);
408 } else if (ctx->updated == 1 &&
409 ctx->session_id != atomic_read(&session_id)) {
410 cryp_flush_inoutfifo(device_data);
411 cryp_restore_device_context(device_data, &ctx->dev_ctx);
412
413 add_session_id(ctx);
414 control_register = ctx->dev_ctx.cr;
415 } else
416 control_register = ctx->dev_ctx.cr;
417
418 writel(control_register |
419 (CRYP_CRYPEN_ENABLE << CRYP_CR_CRYPEN_POS),
420 &device_data->base->cr);
421
422 return 0;
423}
424
425static int cryp_get_device_data(struct cryp_ctx *ctx,
426 struct cryp_device_data **device_data)
427{
428 int ret;
429 struct klist_iter device_iterator;
430 struct klist_node *device_node;
431 struct cryp_device_data *local_device_data = NULL;
432 pr_debug(DEV_DBG_NAME " [%s]", __func__);
433
434 /* Wait until a device is available */
435 ret = down_interruptible(&driver_data.device_allocation);
436 if (ret)
437 return ret; /* Interrupted */
438
439 /* Select a device */
440 klist_iter_init(&driver_data.device_list, &device_iterator);
441
442 device_node = klist_next(&device_iterator);
443 while (device_node) {
444 local_device_data = container_of(device_node,
445 struct cryp_device_data, list_node);
446 spin_lock(&local_device_data->ctx_lock);
447 /* current_ctx allocates a device, NULL = unallocated */
448 if (local_device_data->current_ctx) {
449 device_node = klist_next(&device_iterator);
450 } else {
451 local_device_data->current_ctx = ctx;
452 ctx->device = local_device_data;
453 spin_unlock(&local_device_data->ctx_lock);
454 break;
455 }
456 spin_unlock(&local_device_data->ctx_lock);
457 }
458 klist_iter_exit(&device_iterator);
459
460 if (!device_node) {
461 /**
462 * No free device found.
463 * Since we allocated a device with down_interruptible, this
464 * should not be able to happen.
465 * Number of available devices, which are contained in
466 * device_allocation, is therefore decremented by not doing
467 * an up(device_allocation).
468 */
469 return -EBUSY;
470 }
471
472 *device_data = local_device_data;
473
474 return 0;
475}
476
477static void cryp_dma_setup_channel(struct cryp_device_data *device_data,
478 struct device *dev)
479{
480 dma_cap_zero(device_data->dma.mask);
481 dma_cap_set(DMA_SLAVE, device_data->dma.mask);
482
483 device_data->dma.cfg_mem2cryp = mem_to_engine;
484 device_data->dma.chan_mem2cryp =
485 dma_request_channel(device_data->dma.mask,
486 stedma40_filter,
487 device_data->dma.cfg_mem2cryp);
488
489 device_data->dma.cfg_cryp2mem = engine_to_mem;
490 device_data->dma.chan_cryp2mem =
491 dma_request_channel(device_data->dma.mask,
492 stedma40_filter,
493 device_data->dma.cfg_cryp2mem);
494
495 init_completion(&device_data->dma.cryp_dma_complete);
496}
497
498static void cryp_dma_out_callback(void *data)
499{
500 struct cryp_ctx *ctx = (struct cryp_ctx *) data;
501 dev_dbg(ctx->device->dev, "[%s]: ", __func__);
502
503 complete(&ctx->device->dma.cryp_dma_complete);
504}
505
506static int cryp_set_dma_transfer(struct cryp_ctx *ctx,
507 struct scatterlist *sg,
508 int len,
509 enum dma_data_direction direction)
510{
511 struct dma_async_tx_descriptor *desc;
512 struct dma_chan *channel = NULL;
513 dma_cookie_t cookie;
514
515 dev_dbg(ctx->device->dev, "[%s]: ", __func__);
516
517 if (unlikely(!IS_ALIGNED((u32)sg, 4))) {
518 dev_err(ctx->device->dev, "[%s]: Data in sg list isn't "
519 "aligned! Addr: 0x%08x", __func__, (u32)sg);
520 return -EFAULT;
521 }
522
523 switch (direction) {
524 case DMA_TO_DEVICE:
525 channel = ctx->device->dma.chan_mem2cryp;
526 ctx->device->dma.sg_src = sg;
527 ctx->device->dma.sg_src_len = dma_map_sg(channel->device->dev,
528 ctx->device->dma.sg_src,
529 ctx->device->dma.nents_src,
530 direction);
531
532 if (!ctx->device->dma.sg_src_len) {
533 dev_dbg(ctx->device->dev,
534 "[%s]: Could not map the sg list (TO_DEVICE)",
535 __func__);
536 return -EFAULT;
537 }
538
539 dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
540 "(TO_DEVICE)", __func__);
541
542 desc = channel->device->device_prep_slave_sg(channel,
543 ctx->device->dma.sg_src,
544 ctx->device->dma.sg_src_len,
545 direction,
546 DMA_CTRL_ACK);
547 break;
548
549 case DMA_FROM_DEVICE:
550 channel = ctx->device->dma.chan_cryp2mem;
551 ctx->device->dma.sg_dst = sg;
552 ctx->device->dma.sg_dst_len = dma_map_sg(channel->device->dev,
553 ctx->device->dma.sg_dst,
554 ctx->device->dma.nents_dst,
555 direction);
556
557 if (!ctx->device->dma.sg_dst_len) {
558 dev_dbg(ctx->device->dev,
559 "[%s]: Could not map the sg list (FROM_DEVICE)",
560 __func__);
561 return -EFAULT;
562 }
563
564 dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
565 "(FROM_DEVICE)", __func__);
566
567 desc = channel->device->device_prep_slave_sg(channel,
568 ctx->device->dma.sg_dst,
569 ctx->device->dma.sg_dst_len,
570 direction,
571 DMA_CTRL_ACK |
572 DMA_PREP_INTERRUPT);
573
574 desc->callback = cryp_dma_out_callback;
575 desc->callback_param = ctx;
576 break;
577
578 default:
579 dev_dbg(ctx->device->dev, "[%s]: Invalid DMA direction",
580 __func__);
581 return -EFAULT;
582 }
583
584 cookie = desc->tx_submit(desc);
585 dma_async_issue_pending(channel);
586
587 return 0;
588}
589
590static void cryp_dma_done(struct cryp_ctx *ctx)
591{
592 struct dma_chan *chan;
593
594 dev_dbg(ctx->device->dev, "[%s]: ", __func__);
595
596 chan = ctx->device->dma.chan_mem2cryp;
597 chan->device->device_control(chan, DMA_TERMINATE_ALL, 0);
598 dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_src,
599 ctx->device->dma.sg_src_len, DMA_TO_DEVICE);
600
601 chan = ctx->device->dma.chan_cryp2mem;
602 chan->device->device_control(chan, DMA_TERMINATE_ALL, 0);
603 dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_dst,
604 ctx->device->dma.sg_dst_len, DMA_FROM_DEVICE);
605}
606
607static int cryp_dma_write(struct cryp_ctx *ctx, struct scatterlist *sg,
608 int len)
609{
610 int error = cryp_set_dma_transfer(ctx, sg, len, DMA_TO_DEVICE);
611 dev_dbg(ctx->device->dev, "[%s]: ", __func__);
612
613 if (error) {
614 dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
615 "failed", __func__);
616 return error;
617 }
618
619 return len;
620}
621
622static int cryp_dma_read(struct cryp_ctx *ctx, struct scatterlist *sg, int len)
623{
624 int error = cryp_set_dma_transfer(ctx, sg, len, DMA_FROM_DEVICE);
625 if (error) {
626 dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
627 "failed", __func__);
628 return error;
629 }
630
631 return len;
632}
633
634static void cryp_polling_mode(struct cryp_ctx *ctx,
635 struct cryp_device_data *device_data)
636{
637 int len = ctx->blocksize / BYTES_PER_WORD;
638 int remaining_length = ctx->datalen;
639 u32 *indata = (u32 *)ctx->indata;
640 u32 *outdata = (u32 *)ctx->outdata;
641
642 while (remaining_length > 0) {
643 writesl(&device_data->base->din, indata, len);
644 indata += len;
645 remaining_length -= (len * BYTES_PER_WORD);
646 cryp_wait_until_done(device_data);
647
648 readsl(&device_data->base->dout, outdata, len);
649 outdata += len;
650 cryp_wait_until_done(device_data);
651 }
652}
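Note that writesl() and readsl() transfer len consecutive 32-bit words to and from one fixed register address, which matches the FIFO semantics of din and dout. On little-endian ARM the write step is equivalent to this explicit loop (illustration only):

    /*	for (i = 0; i < len; i++)
     *		writel_relaxed(indata[i], &device_data->base->din);
     */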
653
654static int cryp_disable_power(struct device *dev,
655 struct cryp_device_data *device_data,
656 bool save_device_context)
657{
658 int ret = 0;
659
660 dev_dbg(dev, "[%s]", __func__);
661
662 spin_lock(&device_data->power_state_spinlock);
663 if (!device_data->power_state)
664 goto out;
665
666 spin_lock(&device_data->ctx_lock);
667 if (save_device_context && device_data->current_ctx) {
668 cryp_save_device_context(device_data,
669 &device_data->current_ctx->dev_ctx,
670 cryp_mode);
671 device_data->restore_dev_ctx = true;
672 }
673 spin_unlock(&device_data->ctx_lock);
674
675 clk_disable(device_data->clk);
676 ret = regulator_disable(device_data->pwr_regulator);
677 if (ret)
678 dev_err(dev, "[%s]: "
679 "regulator_disable() failed!",
680 __func__);
681
682 device_data->power_state = false;
683
684out:
685 spin_unlock(&device_data->power_state_spinlock);
686
687 return ret;
688}
689
690static int cryp_enable_power(
691 struct device *dev,
692 struct cryp_device_data *device_data,
693 bool restore_device_context)
694{
695 int ret = 0;
696
697 dev_dbg(dev, "[%s]", __func__);
698
699 spin_lock(&device_data->power_state_spinlock);
700 if (!device_data->power_state) {
701 ret = regulator_enable(device_data->pwr_regulator);
702 if (ret) {
703 dev_err(dev, "[%s]: regulator_enable() failed!",
704 __func__);
705 goto out;
706 }
707
708 ret = clk_enable(device_data->clk);
709 if (ret) {
710 dev_err(dev, "[%s]: clk_enable() failed!",
711 __func__);
712 regulator_disable(device_data->pwr_regulator);
713 goto out;
714 }
715 device_data->power_state = true;
716 }
717
718 if (device_data->restore_dev_ctx) {
719 spin_lock(&device_data->ctx_lock);
720 if (restore_device_context && device_data->current_ctx) {
721 device_data->restore_dev_ctx = false;
722 cryp_restore_device_context(device_data,
723 &device_data->current_ctx->dev_ctx);
724 }
725 spin_unlock(&device_data->ctx_lock);
726 }
727out:
728 spin_unlock(&device_data->power_state_spinlock);
729
730 return ret;
731}
732
733static int hw_crypt_noxts(struct cryp_ctx *ctx,
734 struct cryp_device_data *device_data)
735{
736 int ret = 0;
737
738 const u8 *indata = ctx->indata;
739 u8 *outdata = ctx->outdata;
740 u32 datalen = ctx->datalen;
741 u32 outlen = datalen;
742
743 pr_debug(DEV_DBG_NAME " [%s]", __func__);
744
745 ctx->outlen = ctx->datalen;
746
747 if (unlikely(!IS_ALIGNED((u32)indata, 4))) {
748 pr_debug(DEV_DBG_NAME " [%s]: Data isn't aligned! Addr: "
749 "0x%08x", __func__, (u32)indata);
750 return -EINVAL;
751 }
752
753 ret = cryp_setup_context(ctx, device_data);
754
755 if (ret)
756 goto out;
757
758 if (cryp_mode == CRYP_MODE_INTERRUPT) {
759 cryp_enable_irq_src(device_data, CRYP_IRQ_SRC_INPUT_FIFO |
760 CRYP_IRQ_SRC_OUTPUT_FIFO);
761
762 /*
763 * ctx->outlen is decremented in the cryp_interrupt_handler
764 * function. We had to add cpu_relax() (barrier) to make sure
765 * that gcc didn't optimize away this variable.
766 */
767 while (ctx->outlen > 0)
768 cpu_relax();
769 } else if (cryp_mode == CRYP_MODE_POLLING ||
770 cryp_mode == CRYP_MODE_DMA) {
771 /*
772 * The reason DMA ends up in this branch: even with cryp_mode = 2
773 * (DMA), the dedicated DMA routines are only used for cipher-/
774 * plaintext larger than the blocksize; for the normal
775 * CRYPTO_ALG_TYPE_CIPHER path we still use polling mode, since
776 * the overhead of setting up DMA eats up the benefit of
777 * using it.
778 */
779 cryp_polling_mode(ctx, device_data);
780 } else {
781 dev_err(ctx->device->dev, "[%s]: Invalid operation mode!",
782 __func__);
783 ret = -EPERM;
784 goto out;
785 }
786
787 cryp_save_device_context(device_data, &ctx->dev_ctx, cryp_mode);
788 ctx->updated = 1;
789
790out:
791 ctx->indata = indata;
792 ctx->outdata = outdata;
793 ctx->datalen = datalen;
794 ctx->outlen = outlen;
795
796 return ret;
797}
798
799static int get_nents(struct scatterlist *sg, int nbytes)
800{
801 int nents = 0;
802
803 while (nbytes > 0) {
804 nbytes -= sg->length;
805 sg = scatterwalk_sg_next(sg);
806 nents++;
807 }
808
809 return nents;
810}
811
812static int ablk_dma_crypt(struct ablkcipher_request *areq)
813{
814 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
815 struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
816 struct cryp_device_data *device_data;
817
818 int bytes_written = 0;
819 int bytes_read = 0;
820 int ret;
821
822 pr_debug(DEV_DBG_NAME " [%s]", __func__);
823
824 ctx->datalen = areq->nbytes;
825 ctx->outlen = areq->nbytes;
826
827 ret = cryp_get_device_data(ctx, &device_data);
828 if (ret)
829 return ret;
830
831 ret = cryp_setup_context(ctx, device_data);
832 if (ret)
833 goto out;
834
835 /* We have the device now, so store the nents in the dma struct. */
836 ctx->device->dma.nents_src = get_nents(areq->src, ctx->datalen);
837 ctx->device->dma.nents_dst = get_nents(areq->dst, ctx->outlen);
838
839 /* Enable DMA in- and output. */
840 cryp_configure_for_dma(device_data, CRYP_DMA_ENABLE_BOTH_DIRECTIONS);
841
842 bytes_written = cryp_dma_write(ctx, areq->src, ctx->datalen);
843 bytes_read = cryp_dma_read(ctx, areq->dst, bytes_written);
844
845 wait_for_completion(&ctx->device->dma.cryp_dma_complete);
846 cryp_dma_done(ctx);
847
848 cryp_save_device_context(device_data, &ctx->dev_ctx, cryp_mode);
849 ctx->updated = 1;
850
851out:
852 spin_lock(&device_data->ctx_lock);
853 device_data->current_ctx = NULL;
854 ctx->device = NULL;
855 spin_unlock(&device_data->ctx_lock);
856
857 /*
858 * The down_interruptible part for this semaphore is called in
859 * cryp_get_device_data.
860 */
861 up(&driver_data.device_allocation);
862
863 if (unlikely(bytes_written != bytes_read))
864 return -EPERM;
865
866 return 0;
867}
868
869static int ablk_crypt(struct ablkcipher_request *areq)
870{
871 struct ablkcipher_walk walk;
872 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
873 struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
874 struct cryp_device_data *device_data;
875 unsigned long src_paddr;
876 unsigned long dst_paddr;
877 int ret;
878 int nbytes;
879
880 pr_debug(DEV_DBG_NAME " [%s]", __func__);
881
882 ret = cryp_get_device_data(ctx, &device_data);
883 if (ret)
884 goto out;
885
886 ablkcipher_walk_init(&walk, areq->dst, areq->src, areq->nbytes);
887 ret = ablkcipher_walk_phys(areq, &walk);
888
889 if (ret) {
890 pr_err(DEV_DBG_NAME "[%s]: ablkcipher_walk_phys() failed!",
891 __func__);
892 goto out;
893 }
894
895 while ((nbytes = walk.nbytes) > 0) {
896 ctx->iv = walk.iv;
897 src_paddr = (page_to_phys(walk.src.page) + walk.src.offset);
898 ctx->indata = phys_to_virt(src_paddr);
899
900 dst_paddr = (page_to_phys(walk.dst.page) + walk.dst.offset);
901 ctx->outdata = phys_to_virt(dst_paddr);
902
903 ctx->datalen = nbytes - (nbytes % ctx->blocksize);
904
905 ret = hw_crypt_noxts(ctx, device_data);
906 if (ret)
907 goto out;
908
909 nbytes -= ctx->datalen;
910 ret = ablkcipher_walk_done(areq, &walk, nbytes);
911 if (ret)
912 goto out;
913 }
914 ablkcipher_walk_complete(&walk);
915
916out:
917 /* Release the device */
918 spin_lock(&device_data->ctx_lock);
919 device_data->current_ctx = NULL;
920 ctx->device = NULL;
921 spin_unlock(&device_data->ctx_lock);
922
923 /*
924 * The down_interruptible part for this semaphore is called in
925 * cryp_get_device_data.
926 */
927 up(&driver_data.device_allocation);
928
929 return ret;
930}
931
932static int aes_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
933 const u8 *key, unsigned int keylen)
934{
935 struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
936 u32 *flags = &cipher->base.crt_flags;
937
938 pr_debug(DEV_DBG_NAME " [%s]", __func__);
939
940 switch (keylen) {
941 case AES_KEYSIZE_128:
942 ctx->config.keysize = CRYP_KEY_SIZE_128;
943 break;
944
945 case AES_KEYSIZE_192:
946 ctx->config.keysize = CRYP_KEY_SIZE_192;
947 break;
948
949 case AES_KEYSIZE_256:
950 ctx->config.keysize = CRYP_KEY_SIZE_256;
951 break;
952
953 default:
954 pr_err(DEV_DBG_NAME "[%s]: Unknown keylen!", __func__);
955 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
956 return -EINVAL;
957 }
958
959 memcpy(ctx->key, key, keylen);
960 ctx->keylen = keylen;
961
962 ctx->updated = 0;
963
964 return 0;
965}
966
967static int des_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
968 const u8 *key, unsigned int keylen)
969{
970 struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
971 u32 *flags = &cipher->base.crt_flags;
972 u32 tmp[DES_EXPKEY_WORDS];
973 int ret;
974
975 pr_debug(DEV_DBG_NAME " [%s]", __func__);
976 if (keylen != DES_KEY_SIZE) {
977 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
978 pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_RES_BAD_KEY_LEN",
979 __func__);
980 return -EINVAL;
981 }
982
983 ret = des_ekey(tmp, key);
984 if (unlikely(ret == 0) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
985 *flags |= CRYPTO_TFM_RES_WEAK_KEY;
986 pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_REQ_WEAK_KEY",
987 __func__);
988 return -EINVAL;
989 }
990
991 memcpy(ctx->key, key, keylen);
992 ctx->keylen = keylen;
993
994 ctx->updated = 0;
995 return 0;
996}
997
998static int des3_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
999 const u8 *key, unsigned int keylen)
1000{
1001 struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1002 u32 *flags = &cipher->base.crt_flags;
1003 const u32 *K = (const u32 *)key;
1004 u32 tmp[DES3_EDE_EXPKEY_WORDS];
1005 int i, ret;
1006
1007 pr_debug(DEV_DBG_NAME " [%s]", __func__);
1008 if (keylen != DES3_EDE_KEY_SIZE) {
1009 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
1010 pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_RES_BAD_KEY_LEN",
1011 __func__);
1012 return -EINVAL;
1013 }
1014
1015 /* Checking key interdependency for weak key detection. */
1016 if (unlikely(!((K[0] ^ K[2]) | (K[1] ^ K[3])) ||
1017 !((K[2] ^ K[4]) | (K[3] ^ K[5]))) &&
1018 (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
1019 *flags |= CRYPTO_TFM_RES_WEAK_KEY;
1020 pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_REQ_WEAK_KEY",
1021 __func__);
1022 return -EINVAL;
1023 }
1024 for (i = 0; i < 3; i++) {
1025 ret = des_ekey(tmp, key + i*DES_KEY_SIZE);
1026 if (unlikely(ret == 0) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
1027 *flags |= CRYPTO_TFM_RES_WEAK_KEY;
1028 pr_debug(DEV_DBG_NAME " [%s]: "
1029 "CRYPTO_TFM_REQ_WEAK_KEY", __func__);
1030 return -EINVAL;
1031 }
1032 }
1033
1034 memcpy(ctx->key, key, keylen);
1035 ctx->keylen = keylen;
1036
1037 ctx->updated = 0;
1038 return 0;
1039}
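The interdependency test above works because 3DES EDE collapses to a single DES when K1 == K2 or K2 == K3. With K[] viewed as the six 32-bit words of the 24-byte key:

    /* ((K[0] ^ K[2]) | (K[1] ^ K[3])) == 0   <=>   K1 == K2
     * ((K[2] ^ K[4]) | (K[3] ^ K[5])) == 0   <=>   K2 == K3
     * Either way EDE(K1, K2, K3) degenerates to one DES pass, so such
     * keys are rejected when CRYPTO_TFM_REQ_WEAK_KEY is set.
     */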
1040
1041static int cryp_blk_encrypt(struct ablkcipher_request *areq)
1042{
1043 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1044 struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1045
1046 pr_debug(DEV_DBG_NAME " [%s]", __func__);
1047
1048 ctx->config.algodir = CRYP_ALGORITHM_ENCRYPT;
1049
1050
1051 /* DMA does not work for DES due to a hw bug */
1052 if (cryp_mode == CRYP_MODE_DMA && mode_is_aes(ctx->config.algomode))
1053 return ablk_dma_crypt(areq);
1054
1055 /* For everything except DMA, we run the non DMA version. */
1056 return ablk_crypt(areq);
1057}
1058
1059static int cryp_blk_decrypt(struct ablkcipher_request *areq)
1060{
1061 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1062 struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1063
1064 pr_debug(DEV_DBG_NAME " [%s]", __func__);
1065
1066 ctx->config.algodir = CRYP_ALGORITHM_DECRYPT;
1067
1068 /* DMA does not work for DES due to a hw bug */
1069 if (cryp_mode == CRYP_MODE_DMA && mode_is_aes(ctx->config.algomode))
1070 return ablk_dma_crypt(areq);
1071
1072 /* For everything except DMA, we run the non DMA version. */
1073 return ablk_crypt(areq);
1074}
1075
1076struct cryp_algo_template {
1077 enum cryp_algo_mode algomode;
1078 struct crypto_alg crypto;
1079};
1080
1081static int cryp_cra_init(struct crypto_tfm *tfm)
1082{
1083 struct cryp_ctx *ctx = crypto_tfm_ctx(tfm);
1084 struct crypto_alg *alg = tfm->__crt_alg;
1085 struct cryp_algo_template *cryp_alg = container_of(alg,
1086 struct cryp_algo_template,
1087 crypto);
1088
1089 ctx->config.algomode = cryp_alg->algomode;
1090 ctx->blocksize = crypto_tfm_alg_blocksize(tfm);
1091
1092 return 0;
1093}
1094
1095static struct cryp_algo_template cryp_algs[] = {
1096 {
1097 .algomode = CRYP_ALGO_AES_ECB,
1098 .crypto = {
1099 .cra_name = "aes",
1100 .cra_driver_name = "aes-ux500",
1101 .cra_priority = 300,
1102 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
1103 CRYPTO_ALG_ASYNC,
1104 .cra_blocksize = AES_BLOCK_SIZE,
1105 .cra_ctxsize = sizeof(struct cryp_ctx),
1106 .cra_alignmask = 3,
1107 .cra_type = &crypto_ablkcipher_type,
1108 .cra_init = cryp_cra_init,
1109 .cra_module = THIS_MODULE,
1110 .cra_u = {
1111 .ablkcipher = {
1112 .min_keysize = AES_MIN_KEY_SIZE,
1113 .max_keysize = AES_MAX_KEY_SIZE,
1114 .setkey = aes_ablkcipher_setkey,
1115 .encrypt = cryp_blk_encrypt,
1116 .decrypt = cryp_blk_decrypt
1117 }
1118 }
1119 }
1120 },
1121 {
1122 .algomode = CRYP_ALGO_AES_ECB,
1123 .crypto = {
1124 .cra_name = "ecb(aes)",
1125 .cra_driver_name = "ecb-aes-ux500",
1126 .cra_priority = 300,
1127 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
1128 CRYPTO_ALG_ASYNC,
1129 .cra_blocksize = AES_BLOCK_SIZE,
1130 .cra_ctxsize = sizeof(struct cryp_ctx),
1131 .cra_alignmask = 3,
1132 .cra_type = &crypto_ablkcipher_type,
1133 .cra_init = cryp_cra_init,
1134 .cra_module = THIS_MODULE,
1135 .cra_u = {
1136 .ablkcipher = {
1137 .min_keysize = AES_MIN_KEY_SIZE,
1138 .max_keysize = AES_MAX_KEY_SIZE,
1139 .setkey = aes_ablkcipher_setkey,
1140 .encrypt = cryp_blk_encrypt,
1141 .decrypt = cryp_blk_decrypt,
1142 }
1143 }
1144 }
1145 },
1146 {
1147 .algomode = CRYP_ALGO_AES_CBC,
1148 .crypto = {
1149 .cra_name = "cbc(aes)",
1150 .cra_driver_name = "cbc-aes-ux500",
1151 .cra_priority = 300,
1152 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
1153 CRYPTO_ALG_ASYNC,
1154 .cra_blocksize = AES_BLOCK_SIZE,
1155 .cra_ctxsize = sizeof(struct cryp_ctx),
1156 .cra_alignmask = 3,
1157 .cra_type = &crypto_ablkcipher_type,
1158 .cra_init = cryp_cra_init,
1159 .cra_module = THIS_MODULE,
1160 .cra_u = {
1161 .ablkcipher = {
1162 .min_keysize = AES_MIN_KEY_SIZE,
1163 .max_keysize = AES_MAX_KEY_SIZE,
1164 .setkey = aes_ablkcipher_setkey,
1165 .encrypt = cryp_blk_encrypt,
1166 .decrypt = cryp_blk_decrypt,
1167 .ivsize = AES_BLOCK_SIZE,
1168 }
1169 }
1170 }
1171 },
1172 {
1173 .algomode = CRYP_ALGO_AES_CTR,
1174 .crypto = {
1175 .cra_name = "ctr(aes)",
1176 .cra_driver_name = "ctr-aes-ux500",
1177 .cra_priority = 300,
1178 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
1179 CRYPTO_ALG_ASYNC,
1180 .cra_blocksize = AES_BLOCK_SIZE,
1181 .cra_ctxsize = sizeof(struct cryp_ctx),
1182 .cra_alignmask = 3,
1183 .cra_type = &crypto_ablkcipher_type,
1184 .cra_init = cryp_cra_init,
1185 .cra_module = THIS_MODULE,
1186 .cra_u = {
1187 .ablkcipher = {
1188 .min_keysize = AES_MIN_KEY_SIZE,
1189 .max_keysize = AES_MAX_KEY_SIZE,
1190 .setkey = aes_ablkcipher_setkey,
1191 .encrypt = cryp_blk_encrypt,
1192 .decrypt = cryp_blk_decrypt,
1193 .ivsize = AES_BLOCK_SIZE,
1194 }
1195 }
1196 }
1197 },
1198 {
1199 .algomode = CRYP_ALGO_DES_ECB,
1200 .crypto = {
1201 .cra_name = "des",
1202 .cra_driver_name = "des-ux500",
1203 .cra_priority = 300,
1204 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
1205 CRYPTO_ALG_ASYNC,
1206 .cra_blocksize = DES_BLOCK_SIZE,
1207 .cra_ctxsize = sizeof(struct cryp_ctx),
1208 .cra_alignmask = 3,
1209 .cra_type = &crypto_ablkcipher_type,
1210 .cra_init = cryp_cra_init,
1211 .cra_module = THIS_MODULE,
1212 .cra_u = {
1213 .ablkcipher = {
1214 .min_keysize = DES_KEY_SIZE,
1215 .max_keysize = DES_KEY_SIZE,
1216 .setkey = des_ablkcipher_setkey,
1217 .encrypt = cryp_blk_encrypt,
1218 .decrypt = cryp_blk_decrypt
1219 }
1220 }
1221 }
1223 },
1224 {
1225 .algomode = CRYP_ALGO_TDES_ECB,
1226 .crypto = {
1227 .cra_name = "des3_ede",
1228 .cra_driver_name = "des3_ede-ux500",
1229 .cra_priority = 300,
1230 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
1231 CRYPTO_ALG_ASYNC,
1232 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1233 .cra_ctxsize = sizeof(struct cryp_ctx),
1234 .cra_alignmask = 3,
1235 .cra_type = &crypto_ablkcipher_type,
1236 .cra_init = cryp_cra_init,
1237 .cra_module = THIS_MODULE,
1238 .cra_u = {
1239 .ablkcipher = {
1240 .min_keysize = DES3_EDE_KEY_SIZE,
1241 .max_keysize = DES3_EDE_KEY_SIZE,
1242					.setkey = des3_ablkcipher_setkey,
1243 .encrypt = cryp_blk_encrypt,
1244 .decrypt = cryp_blk_decrypt
1245 }
1246 }
1247 }
1248 },
1249 {
1250 .algomode = CRYP_ALGO_DES_ECB,
1251 .crypto = {
1252 .cra_name = "ecb(des)",
1253 .cra_driver_name = "ecb-des-ux500",
1254 .cra_priority = 300,
1255 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
1256 CRYPTO_ALG_ASYNC,
1257 .cra_blocksize = DES_BLOCK_SIZE,
1258 .cra_ctxsize = sizeof(struct cryp_ctx),
1259 .cra_alignmask = 3,
1260 .cra_type = &crypto_ablkcipher_type,
1261 .cra_init = cryp_cra_init,
1262 .cra_module = THIS_MODULE,
1263 .cra_u = {
1264 .ablkcipher = {
1265 .min_keysize = DES_KEY_SIZE,
1266 .max_keysize = DES_KEY_SIZE,
1267 .setkey = des_ablkcipher_setkey,
1268 .encrypt = cryp_blk_encrypt,
1269 .decrypt = cryp_blk_decrypt,
1270 }
1271 }
1272 }
1273 },
1274 {
1275 .algomode = CRYP_ALGO_TDES_ECB,
1276 .crypto = {
1277 .cra_name = "ecb(des3_ede)",
1278 .cra_driver_name = "ecb-des3_ede-ux500",
1279 .cra_priority = 300,
1280 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
1281 CRYPTO_ALG_ASYNC,
1282 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1283 .cra_ctxsize = sizeof(struct cryp_ctx),
1284 .cra_alignmask = 3,
1285 .cra_type = &crypto_ablkcipher_type,
1286 .cra_init = cryp_cra_init,
1287 .cra_module = THIS_MODULE,
1288 .cra_u = {
1289 .ablkcipher = {
1290 .min_keysize = DES3_EDE_KEY_SIZE,
1291 .max_keysize = DES3_EDE_KEY_SIZE,
1292 .setkey = des3_ablkcipher_setkey,
1293 .encrypt = cryp_blk_encrypt,
1294 .decrypt = cryp_blk_decrypt,
1295 }
1296 }
1297 }
1298 },
1299 {
1300 .algomode = CRYP_ALGO_DES_CBC,
1301 .crypto = {
1302 .cra_name = "cbc(des)",
1303 .cra_driver_name = "cbc-des-ux500",
1304 .cra_priority = 300,
1305 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
1306 CRYPTO_ALG_ASYNC,
1307 .cra_blocksize = DES_BLOCK_SIZE,
1308 .cra_ctxsize = sizeof(struct cryp_ctx),
1309 .cra_alignmask = 3,
1310 .cra_type = &crypto_ablkcipher_type,
1311 .cra_init = cryp_cra_init,
1312 .cra_module = THIS_MODULE,
1313 .cra_u = {
1314 .ablkcipher = {
1315 .min_keysize = DES_KEY_SIZE,
1316 .max_keysize = DES_KEY_SIZE,
1317 .setkey = des_ablkcipher_setkey,
1318 .encrypt = cryp_blk_encrypt,
1319					.decrypt = cryp_blk_decrypt, .ivsize = DES_BLOCK_SIZE,
1320 }
1321 }
1322 }
1323 },
1324 {
1325 .algomode = CRYP_ALGO_TDES_CBC,
1326 .crypto = {
1327 .cra_name = "cbc(des3_ede)",
1328 .cra_driver_name = "cbc-des3_ede-ux500",
1329 .cra_priority = 300,
1330 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
1331 CRYPTO_ALG_ASYNC,
1332 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1333 .cra_ctxsize = sizeof(struct cryp_ctx),
1334 .cra_alignmask = 3,
1335 .cra_type = &crypto_ablkcipher_type,
1336 .cra_init = cryp_cra_init,
1337 .cra_module = THIS_MODULE,
1338 .cra_u = {
1339 .ablkcipher = {
1340 .min_keysize = DES3_EDE_KEY_SIZE,
1341 .max_keysize = DES3_EDE_KEY_SIZE,
1342 .setkey = des3_ablkcipher_setkey,
1343 .encrypt = cryp_blk_encrypt,
1344 .decrypt = cryp_blk_decrypt,
1345 .ivsize = DES3_EDE_BLOCK_SIZE,
1346 }
1347 }
1348 }
1349 }
1350};
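
For context, this is roughly how a kernel-side caller reaches the algorithms registered above, through the ablkcipher API current at the time. The function name, zero key and zero IV are illustrative; since the algorithms are CRYPTO_ALG_ASYNC, a real caller installs a completion callback and treats -EINPROGRESS/-EBUSY as pending rather than ignoring them as this sketch does:

	#include <linux/crypto.h>
	#include <linux/scatterlist.h>

	static int cryp_usage_sketch(void)
	{
		struct crypto_ablkcipher *tfm;
		struct ablkcipher_request *req;
		struct scatterlist sg;
		u8 key[16] = { 0 }, iv[16] = { 0 }, buf[16] = { 0 };
		int ret;

		/* "cbc-aes-ux500" is picked when its priority (300) wins. */
		tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		ret = crypto_ablkcipher_setkey(tfm, key, sizeof(key));
		if (ret)
			goto out_tfm;

		req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
		if (!req) {
			ret = -ENOMEM;
			goto out_tfm;
		}

		ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
						NULL, NULL);
		sg_init_one(&sg, buf, sizeof(buf));
		ablkcipher_request_set_crypt(req, &sg, &sg, sizeof(buf), iv);

		ret = crypto_ablkcipher_encrypt(req); /* may be -EINPROGRESS */

		ablkcipher_request_free(req);
	out_tfm:
		crypto_free_ablkcipher(tfm);
		return ret;
	}
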
1351
1352/**
1353 * cryp_algs_register_all - register all supported algorithms with the crypto API
1354 */
1355static int cryp_algs_register_all(void)
1356{
1357 int ret;
1358 int i;
1359 int count;
1360
1361	pr_debug(DEV_DBG_NAME " [%s]", __func__);
1362
1363 for (i = 0; i < ARRAY_SIZE(cryp_algs); i++) {
1364 ret = crypto_register_alg(&cryp_algs[i].crypto);
1365 if (ret) {
1366 count = i;
1367 pr_err("[%s] alg registration failed",
1368 cryp_algs[i].crypto.cra_driver_name);
1369 goto unreg;
1370 }
1371 }
1372 return 0;
1373unreg:
1374 for (i = 0; i < count; i++)
1375 crypto_unregister_alg(&cryp_algs[i].crypto);
1376 return ret;
1377}
1378
1379/**
1380 * cryp_algs_unregister_all - unregister the algorithms registered above
1381 */
1382static void cryp_algs_unregister_all(void)
1383{
1384 int i;
1385
1386 pr_debug(DEV_DBG_NAME " [%s]", __func__);
1387
1388 for (i = 0; i < ARRAY_SIZE(cryp_algs); i++)
1389 crypto_unregister_alg(&cryp_algs[i].crypto);
1390}
1391
1392static int ux500_cryp_probe(struct platform_device *pdev)
1393{
1394 int ret;
1395 int cryp_error = 0;
1396 struct resource *res = NULL;
1397 struct resource *res_irq = NULL;
1398 struct cryp_device_data *device_data;
1399 struct cryp_protection_config prot = {
1400 .privilege_access = CRYP_STATE_ENABLE
1401 };
1402 struct device *dev = &pdev->dev;
1403
1404 dev_dbg(dev, "[%s]", __func__);
1405	device_data = kzalloc(sizeof(struct cryp_device_data), GFP_KERNEL);
1406 if (!device_data) {
1407 dev_err(dev, "[%s]: kzalloc() failed!", __func__);
1408 ret = -ENOMEM;
1409 goto out;
1410 }
1411
1412 device_data->dev = dev;
1413 device_data->current_ctx = NULL;
1414
1415 /* Grab the DMA configuration from platform data. */
1416 mem_to_engine = &((struct cryp_platform_data *)
1417 dev->platform_data)->mem_to_engine;
1418 engine_to_mem = &((struct cryp_platform_data *)
1419 dev->platform_data)->engine_to_mem;
1420
1421 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1422 if (!res) {
1423 dev_err(dev, "[%s]: platform_get_resource() failed",
1424 __func__);
1425 ret = -ENODEV;
1426 goto out_kfree;
1427 }
1428
1429 res = request_mem_region(res->start, resource_size(res), pdev->name);
1430 if (res == NULL) {
1431 dev_err(dev, "[%s]: request_mem_region() failed",
1432 __func__);
1433 ret = -EBUSY;
1434 goto out_kfree;
1435 }
1436
1437 device_data->base = ioremap(res->start, resource_size(res));
1438 if (!device_data->base) {
1439 dev_err(dev, "[%s]: ioremap failed!", __func__);
1440 ret = -ENOMEM;
1441 goto out_free_mem;
1442 }
1443
1444 spin_lock_init(&device_data->ctx_lock);
1445 spin_lock_init(&device_data->power_state_spinlock);
1446
1447	/* Get the power regulator for the CRYP hardware block */
1448 device_data->pwr_regulator = regulator_get(&pdev->dev, "v-ape");
1449 if (IS_ERR(device_data->pwr_regulator)) {
1450 dev_err(dev, "[%s]: could not get cryp regulator", __func__);
1451 ret = PTR_ERR(device_data->pwr_regulator);
1452 device_data->pwr_regulator = NULL;
1453 goto out_unmap;
1454 }
1455
1456	/* Get the clock for the CRYP hardware block */
1457 device_data->clk = clk_get(&pdev->dev, NULL);
1458 if (IS_ERR(device_data->clk)) {
1459 dev_err(dev, "[%s]: clk_get() failed!", __func__);
1460 ret = PTR_ERR(device_data->clk);
1461 goto out_regulator;
1462 }
1463
1464 /* Enable device power (and clock) */
1465 ret = cryp_enable_power(device_data->dev, device_data, false);
1466 if (ret) {
1467 dev_err(dev, "[%s]: cryp_enable_power() failed!", __func__);
1468 goto out_clk;
1469 }
1470
1471 cryp_error = cryp_check(device_data);
1472 if (cryp_error != 0) {
1473		dev_err(dev, "[%s]: cryp_check() failed!", __func__);
1474 ret = -EINVAL;
1475 goto out_power;
1476 }
1477
1478 cryp_error = cryp_configure_protection(device_data, &prot);
1479 if (cryp_error != 0) {
1480 dev_err(dev, "[%s]: cryp_configure_protection() failed!",
1481 __func__);
1482 ret = -EINVAL;
1483 goto out_power;
1484 }
1485
1486 res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
1487 if (!res_irq) {
1488		dev_err(dev, "[%s]: IORESOURCE_IRQ unavailable", __func__);
1489		ret = -ENODEV;
1490		goto out_power;
1491 }
1492
1493 ret = request_irq(res_irq->start,
1494 cryp_interrupt_handler,
1495 0,
1496 "cryp1",
1497 device_data);
1498 if (ret) {
1499 dev_err(dev, "[%s]: Unable to request IRQ", __func__);
1500 goto out_power;
1501 }
1502
1503 if (cryp_mode == CRYP_MODE_DMA)
1504 cryp_dma_setup_channel(device_data, dev);
1505
1506 platform_set_drvdata(pdev, device_data);
1507
1508 /* Put the new device into the device list... */
1509 klist_add_tail(&device_data->list_node, &driver_data.device_list);
1510
1511 /* ... and signal that a new device is available. */
1512 up(&driver_data.device_allocation);
1513
1514 atomic_set(&session_id, 1);
1515
1516 ret = cryp_algs_register_all();
1517 if (ret) {
1518		dev_err(dev, "[%s]: cryp_algs_register_all() failed!", __func__);
1519		free_irq(res_irq->start, device_data);
1520		goto out_power;
1521	}
1522
1523 return 0;
1524
1525out_power:
1526 cryp_disable_power(device_data->dev, device_data, false);
1527
1528out_clk:
1529 clk_put(device_data->clk);
1530
1531out_regulator:
1532 regulator_put(device_data->pwr_regulator);
1533
1534out_unmap:
1535 iounmap(device_data->base);
1536
1537out_free_mem:
1538 release_mem_region(res->start, resource_size(res));
1539
1540out_kfree:
1541 kfree(device_data);
1542out:
1543 return ret;
1544}
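
The probe above reads its DMA channel configuration straight out of dev->platform_data, so it expects board-support code along these lines; a hedged sketch, with the channel configuration left as a board-specific placeholder:

	#include <linux/platform_device.h>

	static struct cryp_platform_data u8500_cryp1_platform_data = {
		/*
		 * .mem_to_engine and .engine_to_mem hold the DMA channel
		 * configuration consumed by ux500_cryp_probe(); their
		 * contents are board specific and omitted here.
		 */
	};

	static struct platform_device u8500_cryp1_device = {
		.name = "cryp1",	/* must match cryp_driver.driver.name */
		.id = -1,
		.dev = {
			.platform_data = &u8500_cryp1_platform_data,
		},
	};

	/* Registered from board init code: */
	/* platform_device_register(&u8500_cryp1_device); */
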
1545
1546static int ux500_cryp_remove(struct platform_device *pdev)
1547{
1548 struct resource *res = NULL;
1549 struct resource *res_irq = NULL;
1550 struct cryp_device_data *device_data;
1551
1552 dev_dbg(&pdev->dev, "[%s]", __func__);
1553 device_data = platform_get_drvdata(pdev);
1554 if (!device_data) {
1555 dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
1556 __func__);
1557		return -ENODEV;
1558 }
1559
1560 /* Try to decrease the number of available devices. */
1561 if (down_trylock(&driver_data.device_allocation))
1562 return -EBUSY;
1563
1564 /* Check that the device is free */
1565 spin_lock(&device_data->ctx_lock);
1566 /* current_ctx allocates a device, NULL = unallocated */
1567 if (device_data->current_ctx) {
1568 /* The device is busy */
1569 spin_unlock(&device_data->ctx_lock);
1570 /* Return the device to the pool. */
1571 up(&driver_data.device_allocation);
1572 return -EBUSY;
1573 }
1574
1575 spin_unlock(&device_data->ctx_lock);
1576
1577 /* Remove the device from the list */
1578 if (klist_node_attached(&device_data->list_node))
1579 klist_remove(&device_data->list_node);
1580
1581 /* If this was the last device, remove the services */
1582 if (list_empty(&driver_data.device_list.k_list))
1583 cryp_algs_unregister_all();
1584
1585 res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
1586	if (!res_irq) {
1587		dev_err(&pdev->dev, "[%s]: IORESOURCE_IRQ unavailable",
1588			__func__);
1589	} else {
1590		disable_irq(res_irq->start);
1591		free_irq(res_irq->start, device_data);
1592	}
1593
1594 if (cryp_disable_power(&pdev->dev, device_data, false))
1595 dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed",
1596 __func__);
1597
1598 clk_put(device_data->clk);
1599 regulator_put(device_data->pwr_regulator);
1600
1601 iounmap(device_data->base);
1602
1603 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1604 if (res)
1605		release_mem_region(res->start, resource_size(res));
1606
1607 kfree(device_data);
1608
1609 return 0;
1610}
1611
1612static void ux500_cryp_shutdown(struct platform_device *pdev)
1613{
1614 struct resource *res_irq = NULL;
1615 struct cryp_device_data *device_data;
1616
1617 dev_dbg(&pdev->dev, "[%s]", __func__);
1618
1619 device_data = platform_get_drvdata(pdev);
1620 if (!device_data) {
1621 dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
1622 __func__);
1623 return;
1624 }
1625
1626 /* Check that the device is free */
1627 spin_lock(&device_data->ctx_lock);
1628 /* current_ctx allocates a device, NULL = unallocated */
1629 if (!device_data->current_ctx) {
1630 if (down_trylock(&driver_data.device_allocation))
1631			dev_dbg(&pdev->dev, "[%s]: Cryp still in use! "
1632				"Shutting down anyway...", __func__);
1633		/*
1634		 * Allocate the device: set current_ctx to a non-NULL
1635		 * dummy value so that nobody else can claim the device
1636		 * while it is shutting down.
1637		 */
1638 device_data->current_ctx++;
1639 }
1640 spin_unlock(&device_data->ctx_lock);
1641
1642 /* Remove the device from the list */
1643 if (klist_node_attached(&device_data->list_node))
1644 klist_remove(&device_data->list_node);
1645
1646 /* If this was the last device, remove the services */
1647 if (list_empty(&driver_data.device_list.k_list))
1648 cryp_algs_unregister_all();
1649
1650 res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
1651	if (!res_irq) {
1652		dev_err(&pdev->dev, "[%s]: IORESOURCE_IRQ unavailable",
1653			__func__);
1654	} else {
1655		disable_irq(res_irq->start);
1656		free_irq(res_irq->start, device_data);
1657	}
1658
1659 if (cryp_disable_power(&pdev->dev, device_data, false))
1660 dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed",
1661 __func__);
1662
1663}
1664
1665static int ux500_cryp_suspend(struct platform_device *pdev, pm_message_t state)
1666{
1667 int ret;
1668 struct cryp_device_data *device_data;
1669 struct resource *res_irq;
1670 struct cryp_ctx *temp_ctx = NULL;
1671
1672 dev_dbg(&pdev->dev, "[%s]", __func__);
1673
1674 /* Handle state? */
1675 device_data = platform_get_drvdata(pdev);
1676 if (!device_data) {
1677 dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
1678 __func__);
1679		return -ENODEV;
1680 }
1681
1682 res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
1683 if (!res_irq)
1684		dev_err(&pdev->dev, "[%s]: IORESOURCE_IRQ unavailable",
1685 __func__);
1686 else
1687 disable_irq(res_irq->start);
1688
1689 spin_lock(&device_data->ctx_lock);
1690 if (!device_data->current_ctx)
1691 device_data->current_ctx++;
1692 spin_unlock(&device_data->ctx_lock);
1693	/* If current_ctx is the dummy we just set, the device was idle. */
1694 if (device_data->current_ctx == ++temp_ctx) {
1695 if (down_interruptible(&driver_data.device_allocation))
1696 dev_dbg(&pdev->dev, "[%s]: down_interruptible() "
1697 "failed", __func__);
1698 ret = cryp_disable_power(&pdev->dev, device_data, false);
1699
1700	} else {
1701		ret = cryp_disable_power(&pdev->dev, device_data, true);
1702	}
1703 if (ret)
1704 dev_err(&pdev->dev, "[%s]: cryp_disable_power()", __func__);
1705
1706 return ret;
1707}
1708
1709static int ux500_cryp_resume(struct platform_device *pdev)
1710{
1711 int ret = 0;
1712 struct cryp_device_data *device_data;
1713 struct resource *res_irq;
1714 struct cryp_ctx *temp_ctx = NULL;
1715
1716 dev_dbg(&pdev->dev, "[%s]", __func__);
1717
1718 device_data = platform_get_drvdata(pdev);
1719 if (!device_data) {
1720 dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
1721 __func__);
1722		return -ENODEV;
1723 }
1724	/* Clear the dummy ctx installed at suspend, if it is set. */
1725 spin_lock(&device_data->ctx_lock);
1726 if (device_data->current_ctx == ++temp_ctx)
1727 device_data->current_ctx = NULL;
1728 spin_unlock(&device_data->ctx_lock);
1729
1731 if (!device_data->current_ctx)
1732 up(&driver_data.device_allocation);
1733 else
1734 ret = cryp_enable_power(&pdev->dev, device_data, true);
1735
1736	if (ret) {
1737		dev_err(&pdev->dev, "[%s]: cryp_enable_power() failed!",
1738			__func__);
1739	} else {
1740		res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
1741		if (res_irq)
1742			enable_irq(res_irq->start);
1743	}
1744
1745 return ret;
1746}
1747
1748static struct platform_driver cryp_driver = {
1749 .probe = ux500_cryp_probe,
1750 .remove = ux500_cryp_remove,
1751 .shutdown = ux500_cryp_shutdown,
1752 .suspend = ux500_cryp_suspend,
1753 .resume = ux500_cryp_resume,
1754 .driver = {
1755 .owner = THIS_MODULE,
1756 .name = "cryp1"
1757 }
1758};
1759
1760static int __init ux500_cryp_mod_init(void)
1761{
1762 pr_debug("[%s] is called!", __func__);
1763 klist_init(&driver_data.device_list, NULL, NULL);
1764 /* Initialize the semaphore to 0 devices (locked state) */
1765 sema_init(&driver_data.device_allocation, 0);
1766 return platform_driver_register(&cryp_driver);
1767}
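
driver_data.device_allocation works as a counting pool: each successful probe up()s it once, so claiming a device is a down() and releasing it is an up(). A hedged sketch of that pattern as used from cryp_core.c (cryp_get_device_data() stands in for the driver's klist walk, which is outside this hunk):

	static struct cryp_device_data *cryp_claim_device(void)
	{
		/* Sleeps until ux500_cryp_probe() has up()ed at least once. */
		if (down_interruptible(&driver_data.device_allocation))
			return NULL;

		return cryp_get_device_data();	/* hypothetical list walk */
	}

	static void cryp_release_device(struct cryp_device_data *device_data)
	{
		spin_lock(&device_data->ctx_lock);
		device_data->current_ctx = NULL;	/* mark unallocated */
		spin_unlock(&device_data->ctx_lock);

		up(&driver_data.device_allocation);	/* back into the pool */
	}
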
1768
1769static void __exit ux500_cryp_mod_fini(void)
1770{
1771 pr_debug("[%s] is called!", __func__);
1772 platform_driver_unregister(&cryp_driver);
1774}
1775
1776module_init(ux500_cryp_mod_init);
1777module_exit(ux500_cryp_mod_fini);
1778
1779module_param(cryp_mode, int, 0);
1780
1781MODULE_DESCRIPTION("Driver for ST-Ericsson UX500 CRYP crypto engine.");
1782MODULE_ALIAS("aes-all");
1783MODULE_ALIAS("des-all");
1784
1785MODULE_LICENSE("GPL");
diff --git a/drivers/crypto/ux500/cryp/cryp_irq.c b/drivers/crypto/ux500/cryp/cryp_irq.c
new file mode 100644
index 000000000000..08d291cdbe6d
--- /dev/null
+++ b/drivers/crypto/ux500/cryp/cryp_irq.c
@@ -0,0 +1,45 @@
1/**
2 * Copyright (C) ST-Ericsson SA 2010
3 * Author: Shujuan Chen <shujuan.chen@stericsson.com> for ST-Ericsson.
4 * Author: Jonas Linde <jonas.linde@stericsson.com> for ST-Ericsson.
5 * Author: Joakim Bech <joakim.xx.bech@stericsson.com> for ST-Ericsson.
6 * Author: Berne Hebark <berne.herbark@stericsson.com> for ST-Ericsson.
7 * Author: Niklas Hernaeus <niklas.hernaeus@stericsson.com> for ST-Ericsson.
8 * License terms: GNU General Public License (GPL) version 2.
9 */
10
11#include <linux/kernel.h>
12#include <linux/bitmap.h>
13#include <linux/device.h>
14
15#include "cryp.h"
16#include "cryp_p.h"
17#include "cryp_irq.h"
18#include "cryp_irqp.h"
19
20void cryp_enable_irq_src(struct cryp_device_data *device_data, u32 irq_src)
21{
22 u32 i;
23
24 dev_dbg(device_data->dev, "[%s]", __func__);
25
26 i = readl_relaxed(&device_data->base->imsc);
27	i |= irq_src;
28 writel_relaxed(i, &device_data->base->imsc);
29}
30
31void cryp_disable_irq_src(struct cryp_device_data *device_data, u32 irq_src)
32{
33 u32 i;
34
35 dev_dbg(device_data->dev, "[%s]", __func__);
36
37 i = readl_relaxed(&device_data->base->imsc);
38	i &= ~irq_src;
39 writel_relaxed(i, &device_data->base->imsc);
40}
41
42bool cryp_pending_irq_src(struct cryp_device_data *device_data, u32 irq_src)
43{
44 return (readl_relaxed(&device_data->base->mis) & irq_src) > 0;
45}
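
These three helpers are the whole IRQ-mask interface of the block. A hedged sketch of how an interrupt handler consumes them (the FIFO transfers are elided; the driver's real handler, cryp_interrupt_handler(), lives in cryp_core.c):

	#include <linux/interrupt.h>

	static irqreturn_t cryp_isr_sketch(int irq, void *param)
	{
		struct cryp_device_data *device_data = param;

		if (cryp_pending_irq_src(device_data, CRYP_IRQ_SRC_OUTPUT_FIFO)) {
			/* ... drain device_data->base->dout here ... */
			cryp_disable_irq_src(device_data, CRYP_IRQ_SRC_OUTPUT_FIFO);
		} else if (cryp_pending_irq_src(device_data, CRYP_IRQ_SRC_INPUT_FIFO)) {
			/* ... refill device_data->base->din here ... */
			cryp_disable_irq_src(device_data, CRYP_IRQ_SRC_INPUT_FIFO);
		} else {
			return IRQ_NONE;	/* not this device */
		}

		return IRQ_HANDLED;
	}
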
diff --git a/drivers/crypto/ux500/cryp/cryp_irq.h b/drivers/crypto/ux500/cryp/cryp_irq.h
new file mode 100644
index 000000000000..5a7837f1b8f9
--- /dev/null
+++ b/drivers/crypto/ux500/cryp/cryp_irq.h
@@ -0,0 +1,31 @@
1/**
2 * Copyright (C) ST-Ericsson SA 2010
3 * Author: Shujuan Chen <shujuan.chen@stericsson.com> for ST-Ericsson.
4 * Author: Jonas Linde <jonas.linde@stericsson.com> for ST-Ericsson.
5 * Author: Joakim Bech <joakim.xx.bech@stericsson.com> for ST-Ericsson.
6 * Author: Berne Hebark <berne.herbark@stericsson.com> for ST-Ericsson.
7 * Author: Niklas Hernaeus <niklas.hernaeus@stericsson.com> for ST-Ericsson.
8 * License terms: GNU General Public License (GPL) version 2
9 */
10
11#ifndef _CRYP_IRQ_H_
12#define _CRYP_IRQ_H_
13
14#include "cryp.h"
15
16enum cryp_irq_src_id {
17 CRYP_IRQ_SRC_INPUT_FIFO = 0x1,
18 CRYP_IRQ_SRC_OUTPUT_FIFO = 0x2,
19 CRYP_IRQ_SRC_ALL = 0x3
20};
21
22/**
23 * M0 Functions
24 */
25void cryp_enable_irq_src(struct cryp_device_data *device_data, u32 irq_src);
26
27void cryp_disable_irq_src(struct cryp_device_data *device_data, u32 irq_src);
28
29bool cryp_pending_irq_src(struct cryp_device_data *device_data, u32 irq_src);
30
31#endif /* _CRYP_IRQ_H_ */
diff --git a/drivers/crypto/ux500/cryp/cryp_irqp.h b/drivers/crypto/ux500/cryp/cryp_irqp.h
new file mode 100644
index 000000000000..8b339cc34bf8
--- /dev/null
+++ b/drivers/crypto/ux500/cryp/cryp_irqp.h
@@ -0,0 +1,125 @@
1/**
2 * Copyright (C) ST-Ericsson SA 2010
3 * Author: Shujuan Chen <shujuan.chen@stericsson.com> for ST-Ericsson.
4 * Author: Jonas Linde <jonas.linde@stericsson.com> for ST-Ericsson.
5 * Author: Joakim Bech <joakim.xx.bech@stericsson.com> for ST-Ericsson.
6 * Author: Berne Hebark <berne.herbark@stericsson.com> for ST-Ericsson.
7 * Author: Niklas Hernaeus <niklas.hernaeus@stericsson.com> for ST-Ericsson.
8 * License terms: GNU General Public License (GPL) version 2
9 */
10
11#ifndef __CRYP_IRQP_H_
12#define __CRYP_IRQP_H_
13
14#include "cryp_irq.h"
15
16/**
17 * CRYP Registers - Offset mapping
18 * +-----------------+
19 * 00h | CRYP_CR     | Configuration register
20 * +-----------------+
21 * 04h | CRYP_SR     | Status register
22 * +-----------------+
23 * 08h | CRYP_DIN    | Data In register
24 * +-----------------+
25 * 0ch | CRYP_DINSIZE | Data In size register
26 * +-----------------+
27 * 10h | CRYP_DOUT   | Data Out register
28 * +-----------------+
29 * 14h | CRYP_DOUTSIZE | Data Out size register
30 * +-----------------+
31 * 18h | CRYP_DMACR  | DMA control register
32 * +-----------------+
33 * 1ch | CRYP_IMSC   | Interrupt mask set/clear register
34 * +-----------------+
35 * 20h | CRYP_RIS    | Raw interrupt status register
36 * +-----------------+
37 * 24h | CRYP_MIS    | Masked interrupt status register
38 * +-----------------+
39 * Key registers, IV registers and the Peripheral/PCell ID
40 * registers follow from 28h; see struct cryp_register below
41 * for the full layout.
42 */
43
44/**
45 * struct cryp_register
46 * @cr - Configuration register
47 * @sr - Status register
48 * @din - Data input register
49 * @din_size - Data input size register
50 * @dout - Data output register
51 * @dout_size - Data output size register
52 * @dmacr - DMA control register
53 * @imsc - Interrupt mask set/clear register
54 * @ris - Raw interrupt status register
55 * @mis - Masked interrupt status register
56 * @key_1_l - Key register 1 L
57 * @key_1_r - Key register 1 R
58 * @key_2_l - Key register 2 L
59 * @key_2_r - Key register 2 R
60 * @key_3_l - Key register 3 L
61 * @key_3_r - Key register 3 R
62 * @key_4_l - Key register 4 L
63 * @key_4_r - Key register 4 R
64 * @init_vect_0_l - init vector 0 L
65 * @init_vect_0_r - init vector 0 R
66 * @init_vect_1_l - init vector 1 L
67 * @init_vect_1_r - init vector 1 R
68 * @cryp_unused1 - unused registers
69 * @itcr - Integration test control register
70 * @itip - Integration test input register
71 * @itop - Integration test output register
72 * @cryp_unused2 - unused registers
73 * @periphId0 - FE0 CRYP Peripheral Identification Register
74 * @periphId1 - FE4
75 * @periphId2 - FE8
76 * @periphId3 - FEC
77 * @pcellId0 - FF0 CRYP PCell Identification Register
78 * @pcellId1 - FF4
79 * @pcellId2 - FF8
80 * @pcellId3 - FFC
81 */
82struct cryp_register {
83 u32 cr; /* Configuration register */
84 u32 sr; /* Status register */
85 u32 din; /* Data input register */
86 u32 din_size; /* Data input size register */
87 u32 dout; /* Data output register */
88 u32 dout_size; /* Data output size register */
89	u32 dmacr;	/* DMA control register */
90 u32 imsc; /* Interrupt mask set/clear register */
91 u32 ris; /* Raw interrupt status */
92	u32 mis;	/* Masked interrupt status register */
93
94 u32 key_1_l; /*Key register 1 L */
95 u32 key_1_r; /*Key register 1 R */
96 u32 key_2_l; /*Key register 2 L */
97 u32 key_2_r; /*Key register 2 R */
98 u32 key_3_l; /*Key register 3 L */
99 u32 key_3_r; /*Key register 3 R */
100 u32 key_4_l; /*Key register 4 L */
101 u32 key_4_r; /*Key register 4 R */
102
103 u32 init_vect_0_l; /*init vector 0 L */
104 u32 init_vect_0_r; /*init vector 0 R */
105 u32 init_vect_1_l; /*init vector 1 L */
106 u32 init_vect_1_r; /*init vector 1 R */
107
108 u32 cryp_unused1[(0x80 - 0x58) / sizeof(u32)]; /* unused registers */
109 u32 itcr; /*Integration test control register */
110 u32 itip; /*Integration test input register */
111 u32 itop; /*Integration test output register */
112 u32 cryp_unused2[(0xFE0 - 0x8C) / sizeof(u32)]; /* unused registers */
113
114	u32 periphId0;		/* FE0 CRYP Peripheral Identification Register */
115 u32 periphId1; /* FE4 */
116 u32 periphId2; /* FE8 */
117 u32 periphId3; /* FEC */
118
119	u32 pcellId0;		/* FF0 CRYP PCell Identification Register */
120 u32 pcellId1; /* FF4 */
121 u32 pcellId2; /* FF8 */
122 u32 pcellId3; /* FFC */
123};
124
125#endif
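
The unused1/unused2 padding arrays are what keep the later registers at their documented offsets; a compile-time sanity check (not part of the driver) makes that verifiable:

	#include <linux/bug.h>
	#include <linux/stddef.h>
	#include "cryp_irqp.h"

	static inline void cryp_register_layout_check(void)
	{
		BUILD_BUG_ON(offsetof(struct cryp_register, imsc) != 0x1C);
		BUILD_BUG_ON(offsetof(struct cryp_register, itcr) != 0x80);
		BUILD_BUG_ON(offsetof(struct cryp_register, periphId0) != 0xFE0);
		BUILD_BUG_ON(offsetof(struct cryp_register, pcellId0) != 0xFF0);
	}
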
diff --git a/drivers/crypto/ux500/cryp/cryp_p.h b/drivers/crypto/ux500/cryp/cryp_p.h
new file mode 100644
index 000000000000..0e070829edce
--- /dev/null
+++ b/drivers/crypto/ux500/cryp/cryp_p.h
@@ -0,0 +1,124 @@
1/**
2 * Copyright (C) ST-Ericsson SA 2010
3 * Author: Shujuan Chen <shujuan.chen@stericsson.com> for ST-Ericsson.
4 * Author: Jonas Linde <jonas.linde@stericsson.com> for ST-Ericsson.
5 * Author: Joakim Bech <joakim.xx.bech@stericsson.com> for ST-Ericsson.
6 * Author: Berne Hebark <berne.herbark@stericsson.com> for ST-Ericsson.
7 * Author: Niklas Hernaeus <niklas.hernaeus@stericsson.com> for ST-Ericsson.
8 * License terms: GNU General Public License (GPL) version 2
9 */
10
11#ifndef _CRYP_P_H_
12#define _CRYP_P_H_
13
14#include <linux/io.h>
15#include <linux/bitops.h>
16
17#include "cryp.h"
18#include "cryp_irqp.h"
19
20/**
21 * Generic Macros
22 */
23#define CRYP_SET_BITS(reg_name, mask) \
24 writel_relaxed((readl_relaxed(reg_name) | mask), reg_name)
25
26#define CRYP_WRITE_BIT(reg_name, val, mask) \
27 writel_relaxed(((readl_relaxed(reg_name) & ~(mask)) |\
28 ((val) & (mask))), reg_name)
29
30#define CRYP_TEST_BITS(reg_name, val) \
31 (readl_relaxed(reg_name) & (val))
32
33#define CRYP_PUT_BITS(reg, val, shift, mask) \
34	writel_relaxed(((readl_relaxed(reg) & ~(mask)) | \
35		(((u32)(val) << (shift)) & (mask))), reg)
36
37/**
38 * CRYP specific Macros
39 */
40#define CRYP_PERIPHERAL_ID0 0xE3
41#define CRYP_PERIPHERAL_ID1 0x05
42
43#define CRYP_PERIPHERAL_ID2_DB8500 0x28
44#define CRYP_PERIPHERAL_ID2_DB5500 0x29
45#define CRYP_PERIPHERAL_ID3 0x00
46
47#define CRYP_PCELL_ID0 0x0D
48#define CRYP_PCELL_ID1 0xF0
49#define CRYP_PCELL_ID2 0x05
50#define CRYP_PCELL_ID3 0xB1
51
52/**
53 * CRYP register default values
54 */
55#define MAX_DEVICE_SUPPORT 2
56
57/* Priv set, keyrden set and datatype 8bits swapped set as default. */
58#define CRYP_CR_DEFAULT 0x0482
59#define CRYP_DMACR_DEFAULT 0x0
60#define CRYP_IMSC_DEFAULT 0x0
61#define CRYP_DIN_DEFAULT 0x0
62#define CRYP_DOUT_DEFAULT 0x0
63#define CRYP_KEY_DEFAULT 0x0
64#define CRYP_INIT_VECT_DEFAULT 0x0
65
66/**
67 * CRYP Control register specific mask
68 */
69#define CRYP_CR_SECURE_MASK BIT(0)
70#define CRYP_CR_PRLG_MASK BIT(1)
71#define CRYP_CR_ALGODIR_MASK BIT(2)
72#define CRYP_CR_ALGOMODE_MASK (BIT(5) | BIT(4) | BIT(3))
73#define CRYP_CR_DATATYPE_MASK (BIT(7) | BIT(6))
74#define CRYP_CR_KEYSIZE_MASK (BIT(9) | BIT(8))
75#define CRYP_CR_KEYRDEN_MASK BIT(10)
76#define CRYP_CR_KSE_MASK BIT(11)
77#define CRYP_CR_START_MASK BIT(12)
78#define CRYP_CR_INIT_MASK BIT(13)
79#define CRYP_CR_FFLUSH_MASK BIT(14)
80#define CRYP_CR_CRYPEN_MASK BIT(15)
81#define CRYP_CR_CONTEXT_SAVE_MASK (CRYP_CR_SECURE_MASK |\
82 CRYP_CR_PRLG_MASK |\
83 CRYP_CR_ALGODIR_MASK |\
84 CRYP_CR_ALGOMODE_MASK |\
85 CRYP_CR_DATATYPE_MASK |\
86 CRYP_CR_KEYSIZE_MASK |\
87			CRYP_CR_KEYRDEN_MASK)
89
90
91#define CRYP_SR_INFIFO_READY_MASK (BIT(0) | BIT(1))
92#define CRYP_SR_IFEM_MASK BIT(0)
93#define CRYP_SR_BUSY_MASK BIT(4)
94
95/**
96 * Bit position used while setting bits in register
97 */
98#define CRYP_CR_PRLG_POS 1
99#define CRYP_CR_ALGODIR_POS 2
100#define CRYP_CR_ALGOMODE_POS 3
101#define CRYP_CR_DATATYPE_POS 6
102#define CRYP_CR_KEYSIZE_POS 8
103#define CRYP_CR_KEYRDEN_POS 10
104#define CRYP_CR_KSE_POS 11
105#define CRYP_CR_START_POS 12
106#define CRYP_CR_INIT_POS 13
107#define CRYP_CR_CRYPEN_POS 15
108
109#define CRYP_SR_BUSY_POS 4
110
111/**
112 * CRYP DMA request control (CRYP_DMACR register)
113 * bit masks
114 */
115#define CRYP_DMA_REQ_MASK (BIT(1) | BIT(0))
116#define CRYP_DMA_REQ_MASK_POS 0
117
119struct cryp_system_context {
120 /* CRYP Register structure */
121 struct cryp_register *p_cryp_reg[MAX_DEVICE_SUPPORT];
122};
123
124#endif
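
A hedged usage sketch for the read-modify-write macros above, assuming a mapped register block; it updates only the ALGOMODE field of CRYP_CR and leaves the other bits intact:

	#include "cryp_p.h"

	static inline void cryp_set_algomode_sketch(struct cryp_register *reg,
						    u32 algomode)
	{
		CRYP_PUT_BITS(&reg->cr, algomode, CRYP_CR_ALGOMODE_POS,
			      CRYP_CR_ALGOMODE_MASK);
	}
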