author    Linus Torvalds <torvalds@linux-foundation.org>    2009-03-26 14:04:34 -0400
committer Linus Torvalds <torvalds@linux-foundation.org>    2009-03-26 14:04:34 -0400
commit    562f477a54478002ddfbb5b85627c009ca41e71d (patch)
tree      52384cc554ae64cc7a26878d64d606f40fd703ce /drivers/crypto
parent    ada19a31a90b4f46c040c25ef4ef8ffc203c7fc6 (diff)
parent    949abe574739848b1e68271fbac86c3cb4506aad (diff)
Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (29 commits)
  crypto: sha512-s390 - Add missing block size
  hwrng: timeriomem - Breaks an allyesconfig build on s390:
  nlattr: Fix build error with NET off
  crypto: testmgr - add zlib test
  crypto: zlib - New zlib crypto module, using pcomp
  crypto: testmgr - Add support for the pcomp interface
  crypto: compress - Add pcomp interface
  netlink: Move netlink attribute parsing support to lib
  crypto: Fix dead links
  hwrng: timeriomem - New driver
  crypto: chainiv - Use kcrypto_wq instead of keventd_wq
  crypto: cryptd - Per-CPU thread implementation based on kcrypto_wq
  crypto: api - Use dedicated workqueue for crypto subsystem
  crypto: testmgr - Test skciphers with no IVs
  crypto: aead - Avoid infinite loop when nivaead fails selftest
  crypto: skcipher - Avoid infinite loop when cipher fails selftest
  crypto: api - Fix crypto_alloc_tfm/create_create_tfm return convention
  crypto: api - crypto_alg_mod_lookup either tested or untested
  crypto: amcc - Add crypt4xx driver
  crypto: ansi_cprng - Add maintainer
  ...
Diffstat (limited to 'drivers/crypto')
-rw-r--r--  drivers/crypto/Kconfig                  |   15
-rw-r--r--  drivers/crypto/Makefile                 |    1
-rw-r--r--  drivers/crypto/amcc/Makefile            |    2
-rw-r--r--  drivers/crypto/amcc/crypto4xx_alg.c     |  293
-rw-r--r--  drivers/crypto/amcc/crypto4xx_core.c    | 1310
-rw-r--r--  drivers/crypto/amcc/crypto4xx_core.h    |  177
-rw-r--r--  drivers/crypto/amcc/crypto4xx_reg_def.h |  284
-rw-r--r--  drivers/crypto/amcc/crypto4xx_sa.c      |  108
-rw-r--r--  drivers/crypto/amcc/crypto4xx_sa.h      |  243
9 files changed, 2430 insertions(+), 3 deletions(-)
diff --git a/drivers/crypto/Kconfig b/drivers/crypto/Kconfig
index e522144cba3..01afd758072 100644
--- a/drivers/crypto/Kconfig
+++ b/drivers/crypto/Kconfig
@@ -86,7 +86,7 @@ config ZCRYPT_MONOLITHIC
 config CRYPTO_SHA1_S390
 	tristate "SHA1 digest algorithm"
 	depends on S390
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  This is the s390 hardware accelerated implementation of the
 	  SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2).
@@ -94,7 +94,7 @@ config CRYPTO_SHA1_S390
 config CRYPTO_SHA256_S390
 	tristate "SHA256 digest algorithm"
 	depends on S390
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  This is the s390 hardware accelerated implementation of the
 	  SHA256 secure hash standard (DFIPS 180-2).
@@ -105,7 +105,7 @@ config CRYPTO_SHA256_S390
 config CRYPTO_SHA512_S390
 	tristate "SHA384 and SHA512 digest algorithm"
 	depends on S390
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  This is the s390 hardware accelerated implementation of the
 	  SHA512 secure hash standard.
@@ -200,4 +200,13 @@ config CRYPTO_DEV_IXP4XX
 	help
 	  Driver for the IXP4xx NPE crypto engine.
 
+config CRYPTO_DEV_PPC4XX
+	tristate "Driver for AMCC PPC4xx crypto accelerator"
+	depends on PPC && 4xx
+	select CRYPTO_HASH
+	select CRYPTO_ALGAPI
+	select CRYPTO_BLKCIPHER
+	help
+	  This option allows you to have support for AMCC crypto acceleration.
+
 endif # CRYPTO_HW
diff --git a/drivers/crypto/Makefile b/drivers/crypto/Makefile
index 73557b2968d..9bf4a2bc884 100644
--- a/drivers/crypto/Makefile
+++ b/drivers/crypto/Makefile
@@ -4,3 +4,4 @@ obj-$(CONFIG_CRYPTO_DEV_GEODE) += geode-aes.o
 obj-$(CONFIG_CRYPTO_DEV_HIFN_795X) += hifn_795x.o
 obj-$(CONFIG_CRYPTO_DEV_TALITOS) += talitos.o
 obj-$(CONFIG_CRYPTO_DEV_IXP4XX) += ixp4xx_crypto.o
+obj-$(CONFIG_CRYPTO_DEV_PPC4XX) += amcc/
diff --git a/drivers/crypto/amcc/Makefile b/drivers/crypto/amcc/Makefile
new file mode 100644
index 00000000000..aa376e8d5ed
--- /dev/null
+++ b/drivers/crypto/amcc/Makefile
@@ -0,0 +1,2 @@
1obj-$(CONFIG_CRYPTO_DEV_PPC4XX) += crypto4xx.o
2crypto4xx-objs := crypto4xx_core.o crypto4xx_alg.o crypto4xx_sa.o
diff --git a/drivers/crypto/amcc/crypto4xx_alg.c b/drivers/crypto/amcc/crypto4xx_alg.c
new file mode 100644
index 00000000000..61b6e1bec8c
--- /dev/null
+++ b/drivers/crypto/amcc/crypto4xx_alg.c
@@ -0,0 +1,293 @@
1/**
2 * AMCC SoC PPC4xx Crypto Driver
3 *
4 * Copyright (c) 2008 Applied Micro Circuits Corporation.
5 * All rights reserved. James Hsiao <jhsiao@amcc.com>
6 *
7 * This program is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation; either version 2 of the License, or
10 * (at your option) any later version.
11 *
12 * This program is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 * GNU General Public License for more details.
16 *
17 * This file implements the Linux crypto algorithms.
18 */
19
20#include <linux/kernel.h>
21#include <linux/interrupt.h>
22#include <linux/spinlock_types.h>
23#include <linux/scatterlist.h>
24#include <linux/crypto.h>
25#include <linux/hash.h>
26#include <crypto/internal/hash.h>
27#include <linux/dma-mapping.h>
28#include <crypto/algapi.h>
29#include <crypto/aes.h>
30#include <crypto/sha.h>
31#include "crypto4xx_reg_def.h"
32#include "crypto4xx_sa.h"
33#include "crypto4xx_core.h"
34
35void set_dynamic_sa_command_0(struct dynamic_sa_ctl *sa, u32 save_h,
36 u32 save_iv, u32 ld_h, u32 ld_iv, u32 hdr_proc,
37 u32 h, u32 c, u32 pad_type, u32 op_grp, u32 op,
38 u32 dir)
39{
40 sa->sa_command_0.w = 0;
41 sa->sa_command_0.bf.save_hash_state = save_h;
42 sa->sa_command_0.bf.save_iv = save_iv;
43 sa->sa_command_0.bf.load_hash_state = ld_h;
44 sa->sa_command_0.bf.load_iv = ld_iv;
45 sa->sa_command_0.bf.hdr_proc = hdr_proc;
46 sa->sa_command_0.bf.hash_alg = h;
47 sa->sa_command_0.bf.cipher_alg = c;
48 sa->sa_command_0.bf.pad_type = pad_type & 3;
49 sa->sa_command_0.bf.extend_pad = pad_type >> 2;
50 sa->sa_command_0.bf.op_group = op_grp;
51 sa->sa_command_0.bf.opcode = op;
52 sa->sa_command_0.bf.dir = dir;
53}
54
55void set_dynamic_sa_command_1(struct dynamic_sa_ctl *sa, u32 cm, u32 hmac_mc,
56 u32 cfb, u32 esn, u32 sn_mask, u32 mute,
57 u32 cp_pad, u32 cp_pay, u32 cp_hdr)
58{
59 sa->sa_command_1.w = 0;
60 sa->sa_command_1.bf.crypto_mode31 = (cm & 4) >> 2;
61 sa->sa_command_1.bf.crypto_mode9_8 = cm & 3;
62 sa->sa_command_1.bf.feedback_mode = cfb;
63 sa->sa_command_1.bf.sa_rev = 1;
64 sa->sa_command_1.bf.extended_seq_num = esn;
65 sa->sa_command_1.bf.seq_num_mask = sn_mask;
66 sa->sa_command_1.bf.mutable_bit_proc = mute;
67 sa->sa_command_1.bf.copy_pad = cp_pad;
68 sa->sa_command_1.bf.copy_payload = cp_pay;
69 sa->sa_command_1.bf.copy_hdr = cp_hdr;
70}
71
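Note that the cm argument is split across two SA bit-fields, which is easy to miss on a first read. An illustrative note (editorial, not part of the driver):

/* cm bit 2 lands in crypto_mode31 and bits 1:0 in crypto_mode9_8:
 * e.g. cm = 5 (binary 101) gives crypto_mode31 = (5 & 4) >> 2 = 1
 * and crypto_mode9_8 = 5 & 3 = 1. */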
72int crypto4xx_encrypt(struct ablkcipher_request *req)
73{
74 struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
75
76 ctx->direction = DIR_OUTBOUND;
77 ctx->hash_final = 0;
78 ctx->is_hash = 0;
79 ctx->pd_ctl = 0x1;
80
81 return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
82 req->nbytes, req->info,
83 get_dynamic_sa_iv_size(ctx));
84}
85
86int crypto4xx_decrypt(struct ablkcipher_request *req)
87{
88 struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
89
90 ctx->direction = DIR_INBOUND;
91 ctx->hash_final = 0;
92 ctx->is_hash = 0;
93 ctx->pd_ctl = 1;
94
95 return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
96 req->nbytes, req->info,
97 get_dynamic_sa_iv_size(ctx));
98}
99
100/**
101 * AES Functions
102 */
103static int crypto4xx_setkey_aes(struct crypto_ablkcipher *cipher,
104 const u8 *key,
105 unsigned int keylen,
106 unsigned char cm,
107 u8 fb)
108{
109 struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
110 struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
111 struct dynamic_sa_ctl *sa;
112 int rc;
113
114 if (keylen != AES_KEYSIZE_256 &&
115 keylen != AES_KEYSIZE_192 && keylen != AES_KEYSIZE_128) {
116 crypto_ablkcipher_set_flags(cipher,
117 CRYPTO_TFM_RES_BAD_KEY_LEN);
118 return -EINVAL;
119 }
120
121 /* Create SA */
122 if (ctx->sa_in_dma_addr || ctx->sa_out_dma_addr)
123 crypto4xx_free_sa(ctx);
124
125 rc = crypto4xx_alloc_sa(ctx, SA_AES128_LEN + (keylen-16) / 4);
126 if (rc)
127 return rc;
128
129 if (ctx->state_record_dma_addr == 0) {
130 rc = crypto4xx_alloc_state_record(ctx);
131 if (rc) {
132 crypto4xx_free_sa(ctx);
133 return rc;
134 }
135 }
136 /* Setup SA */
137 sa = (struct dynamic_sa_ctl *) ctx->sa_in;
138 ctx->hash_final = 0;
139
140 set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, SA_NOT_SAVE_IV,
141 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
142 SA_NO_HEADER_PROC, SA_HASH_ALG_NULL,
143 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
144 SA_OP_GROUP_BASIC, SA_OPCODE_DECRYPT,
145 DIR_INBOUND);
146
147 set_dynamic_sa_command_1(sa, cm, SA_HASH_MODE_HASH,
148 fb, SA_EXTENDED_SN_OFF,
149 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
150 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
151 SA_NOT_COPY_HDR);
152 crypto4xx_memcpy_le(ctx->sa_in + get_dynamic_sa_offset_key_field(ctx),
153 key, keylen);
154 sa->sa_contents = SA_AES_CONTENTS | (keylen << 2);
155 sa->sa_command_1.bf.key_len = keylen >> 3;
156 ctx->is_hash = 0;
157 ctx->direction = DIR_INBOUND;
158 memcpy(ctx->sa_in + get_dynamic_sa_offset_state_ptr_field(ctx),
159 (void *)&ctx->state_record_dma_addr, 4);
160 ctx->offset_to_sr_ptr = get_dynamic_sa_offset_state_ptr_field(ctx);
161
162 memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
163 sa = (struct dynamic_sa_ctl *) ctx->sa_out;
164 sa->sa_command_0.bf.dir = DIR_OUTBOUND;
165
166 return 0;
167}
168
169int crypto4xx_setkey_aes_cbc(struct crypto_ablkcipher *cipher,
170 const u8 *key, unsigned int keylen)
171{
172 return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CBC,
173 CRYPTO_FEEDBACK_MODE_NO_FB);
174}
175
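Other cipher modes would be wired up through the same helper. A hypothetical sketch, assuming crypto4xx_sa.h (not shown in this hunk) provides a CRYPTO_MODE_ECB constant:

int crypto4xx_setkey_aes_ecb(struct crypto_ablkcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	/* hypothetical ECB wiring; mirrors crypto4xx_setkey_aes_cbc() above */
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_ECB,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}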
176/**
177 * HASH SHA1 Functions
178 */
179static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm,
180 unsigned int sa_len,
181 unsigned char ha,
182 unsigned char hm)
183{
184 struct crypto_alg *alg = tfm->__crt_alg;
185 struct crypto4xx_alg *my_alg = crypto_alg_to_crypto4xx_alg(alg);
186 struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
187 struct dynamic_sa_ctl *sa;
188 struct dynamic_sa_hash160 *sa_in;
189 int rc;
190
191 ctx->dev = my_alg->dev;
192 ctx->is_hash = 1;
193 ctx->hash_final = 0;
194
195 /* Create SA */
196 if (ctx->sa_in_dma_addr || ctx->sa_out_dma_addr)
197 crypto4xx_free_sa(ctx);
198
199 rc = crypto4xx_alloc_sa(ctx, sa_len);
200 if (rc)
201 return rc;
202
203 if (ctx->state_record_dma_addr == 0) {
204 crypto4xx_alloc_state_record(ctx);
205 if (!ctx->state_record_dma_addr) {
206 crypto4xx_free_sa(ctx);
207 return -ENOMEM;
208 }
209 }
210
211 tfm->crt_ahash.reqsize = sizeof(struct crypto4xx_ctx);
212 sa = (struct dynamic_sa_ctl *) ctx->sa_in;
213 set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
214 SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA,
215 SA_NO_HEADER_PROC, ha, SA_CIPHER_ALG_NULL,
216 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
217 SA_OPCODE_HASH, DIR_INBOUND);
218 set_dynamic_sa_command_1(sa, 0, SA_HASH_MODE_HASH,
219 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
220 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
221 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
222 SA_NOT_COPY_HDR);
223 ctx->direction = DIR_INBOUND;
224 sa->sa_contents = SA_HASH160_CONTENTS;
225 sa_in = (struct dynamic_sa_hash160 *) ctx->sa_in;
226 /* Need to zero hash digest in SA */
227 memset(sa_in->inner_digest, 0, sizeof(sa_in->inner_digest));
228 memset(sa_in->outer_digest, 0, sizeof(sa_in->outer_digest));
229 sa_in->state_ptr = ctx->state_record_dma_addr;
230 ctx->offset_to_sr_ptr = get_dynamic_sa_offset_state_ptr_field(ctx);
231
232 return 0;
233}
234
235int crypto4xx_hash_init(struct ahash_request *req)
236{
237 struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
238 int ds;
239 struct dynamic_sa_ctl *sa;
240
241 sa = (struct dynamic_sa_ctl *) ctx->sa_in;
242 ds = crypto_ahash_digestsize(
243 __crypto_ahash_cast(req->base.tfm));
244 sa->sa_command_0.bf.digest_len = ds >> 2;
245 sa->sa_command_0.bf.load_hash_state = SA_LOAD_HASH_FROM_SA;
246 ctx->is_hash = 1;
247 ctx->direction = DIR_INBOUND;
248
249 return 0;
250}
251
252int crypto4xx_hash_update(struct ahash_request *req)
253{
254 struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
255
256 ctx->is_hash = 1;
257 ctx->hash_final = 0;
258 ctx->pd_ctl = 0x11;
259 ctx->direction = DIR_INBOUND;
260
261 return crypto4xx_build_pd(&req->base, ctx, req->src,
262 (struct scatterlist *) req->result,
263 req->nbytes, NULL, 0);
264}
265
266int crypto4xx_hash_final(struct ahash_request *req)
267{
268 return 0;
269}
270
271int crypto4xx_hash_digest(struct ahash_request *req)
272{
273 struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
274
275 ctx->hash_final = 1;
276 ctx->pd_ctl = 0x11;
277 ctx->direction = DIR_INBOUND;
278
279 return crypto4xx_build_pd(&req->base, ctx, req->src,
280 (struct scatterlist *) req->result,
281 req->nbytes, NULL, 0);
282}
283
284/**
285 * SHA1 Algorithm
286 */
287int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm)
288{
289 return crypto4xx_hash_alg_init(tfm, SA_HASH160_LEN, SA_HASH_ALG_SHA1,
290 SA_HASH_MODE_HASH);
291}
292
293
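For context, here is a minimal editorial sketch of how a kernel caller of this era might exercise the cbc(aes) implementation through the generic ablkcipher API. The function name and flow are illustrative; error handling and the asynchronous completion path are trimmed:

#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <linux/err.h>
#include <crypto/aes.h>

/* minimal sketch: encrypt len bytes of buf in place with a 16-byte key and iv */
static int example_cbc_aes(void *buf, unsigned int len, u8 *key, u8 *iv)
{
	struct crypto_ablkcipher *tfm;
	struct ablkcipher_request *req;
	struct scatterlist sg;
	int rc;

	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	crypto_ablkcipher_setkey(tfm, key, AES_KEYSIZE_128);
	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	sg_init_one(&sg, buf, len);
	ablkcipher_request_set_crypt(req, &sg, &sg, len, iv);

	rc = crypto_ablkcipher_encrypt(req);
	/* an async driver such as crypto4xx returns -EINPROGRESS here;
	 * real code must wait for the completion callback before freeing req */

	ablkcipher_request_free(req);
	crypto_free_ablkcipher(tfm);
	return rc;
}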
diff --git a/drivers/crypto/amcc/crypto4xx_core.c b/drivers/crypto/amcc/crypto4xx_core.c
new file mode 100644
index 00000000000..4c0dfb2b872
--- /dev/null
+++ b/drivers/crypto/amcc/crypto4xx_core.c
@@ -0,0 +1,1310 @@
1/**
2 * AMCC SoC PPC4xx Crypto Driver
3 *
4 * Copyright (c) 2008 Applied Micro Circuits Corporation.
5 * All rights reserved. James Hsiao <jhsiao@amcc.com>
6 *
7 * This program is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation; either version 2 of the License, or
10 * (at your option) any later version.
11 *
12 * This program is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 * GNU General Public License for more details.
16 *
17 * This file implements the AMCC crypto offload Linux device driver for use
18 * with the Linux CryptoAPI.
19 */
20
21#include <linux/kernel.h>
22#include <linux/interrupt.h>
23#include <linux/spinlock_types.h>
24#include <linux/random.h>
25#include <linux/scatterlist.h>
26#include <linux/crypto.h>
27#include <linux/dma-mapping.h>
28#include <linux/platform_device.h>
29#include <linux/init.h>
30#include <linux/of_platform.h>
31#include <asm/dcr.h>
32#include <asm/dcr-regs.h>
33#include <asm/cacheflush.h>
34#include <crypto/internal/hash.h>
35#include <crypto/algapi.h>
36#include <crypto/aes.h>
37#include <crypto/sha.h>
38#include "crypto4xx_reg_def.h"
39#include "crypto4xx_core.h"
40#include "crypto4xx_sa.h"
41
42#define PPC4XX_SEC_VERSION_STR "0.5"
43
44/**
45 * PPC4xx Crypto Engine Initialization Routine
46 */
47static void crypto4xx_hw_init(struct crypto4xx_device *dev)
48{
49 union ce_ring_size ring_size;
50 union ce_ring_contol ring_ctrl;
51 union ce_part_ring_size part_ring_size;
52 union ce_io_threshold io_threshold;
53 u32 rand_num;
54 union ce_pe_dma_cfg pe_dma_cfg;
55
56 writel(PPC4XX_BYTE_ORDER, dev->ce_base + CRYPTO4XX_BYTE_ORDER_CFG);
57 /* set up PE DMA: reset sg, pdr and pe, then release the resets */
58 pe_dma_cfg.w = 0;
59 pe_dma_cfg.bf.bo_sgpd_en = 1;
60 pe_dma_cfg.bf.bo_data_en = 0;
61 pe_dma_cfg.bf.bo_sa_en = 1;
62 pe_dma_cfg.bf.bo_pd_en = 1;
63 pe_dma_cfg.bf.dynamic_sa_en = 1;
64 pe_dma_cfg.bf.reset_sg = 1;
65 pe_dma_cfg.bf.reset_pdr = 1;
66 pe_dma_cfg.bf.reset_pe = 1;
67 writel(pe_dma_cfg.w, dev->ce_base + CRYPTO4XX_PE_DMA_CFG);
68 /* release reset of pe, sg and pdr */
69 pe_dma_cfg.bf.pe_mode = 0;
70 pe_dma_cfg.bf.reset_sg = 0;
71 pe_dma_cfg.bf.reset_pdr = 0;
72 pe_dma_cfg.bf.reset_pe = 0;
73 pe_dma_cfg.bf.bo_td_en = 0;
74 writel(pe_dma_cfg.w, dev->ce_base + CRYPTO4XX_PE_DMA_CFG);
75 writel(dev->pdr_pa, dev->ce_base + CRYPTO4XX_PDR_BASE);
76 writel(dev->pdr_pa, dev->ce_base + CRYPTO4XX_RDR_BASE);
77 writel(PPC4XX_PRNG_CTRL_AUTO_EN, dev->ce_base + CRYPTO4XX_PRNG_CTRL);
78 get_random_bytes(&rand_num, sizeof(rand_num));
79 writel(rand_num, dev->ce_base + CRYPTO4XX_PRNG_SEED_L);
80 get_random_bytes(&rand_num, sizeof(rand_num));
81 writel(rand_num, dev->ce_base + CRYPTO4XX_PRNG_SEED_H);
82 ring_size.w = 0;
83 ring_size.bf.ring_offset = PPC4XX_PD_SIZE;
84 ring_size.bf.ring_size = PPC4XX_NUM_PD;
85 writel(ring_size.w, dev->ce_base + CRYPTO4XX_RING_SIZE);
86 ring_ctrl.w = 0;
87 writel(ring_ctrl.w, dev->ce_base + CRYPTO4XX_RING_CTRL);
88 writel(PPC4XX_DC_3DES_EN, dev->ce_base + CRYPTO4XX_DEVICE_CTRL);
89 writel(dev->gdr_pa, dev->ce_base + CRYPTO4XX_GATH_RING_BASE);
90 writel(dev->sdr_pa, dev->ce_base + CRYPTO4XX_SCAT_RING_BASE);
91 part_ring_size.w = 0;
92 part_ring_size.bf.sdr_size = PPC4XX_SDR_SIZE;
93 part_ring_size.bf.gdr_size = PPC4XX_GDR_SIZE;
94 writel(part_ring_size.w, dev->ce_base + CRYPTO4XX_PART_RING_SIZE);
95 writel(PPC4XX_SD_BUFFER_SIZE, dev->ce_base + CRYPTO4XX_PART_RING_CFG);
96 io_threshold.w = 0;
97 io_threshold.bf.output_threshold = PPC4XX_OUTPUT_THRESHOLD;
98 io_threshold.bf.input_threshold = PPC4XX_INPUT_THRESHOLD;
99 writel(io_threshold.w, dev->ce_base + CRYPTO4XX_IO_THRESHOLD);
100 writel(0, dev->ce_base + CRYPTO4XX_PDR_BASE_UADDR);
101 writel(0, dev->ce_base + CRYPTO4XX_RDR_BASE_UADDR);
102 writel(0, dev->ce_base + CRYPTO4XX_PKT_SRC_UADDR);
103 writel(0, dev->ce_base + CRYPTO4XX_PKT_DEST_UADDR);
104 writel(0, dev->ce_base + CRYPTO4XX_SA_UADDR);
105 writel(0, dev->ce_base + CRYPTO4XX_GATH_RING_BASE_UADDR);
106 writel(0, dev->ce_base + CRYPTO4XX_SCAT_RING_BASE_UADDR);
107 /* enable pe mode; keep pe, sg and pdr out of reset */
108 pe_dma_cfg.bf.pe_mode = 1;
109 pe_dma_cfg.bf.reset_sg = 0;
110 pe_dma_cfg.bf.reset_pdr = 0;
111 pe_dma_cfg.bf.reset_pe = 0;
112 pe_dma_cfg.bf.bo_td_en = 0;
113 writel(pe_dma_cfg.w, dev->ce_base + CRYPTO4XX_PE_DMA_CFG);
114 /* clear all pending interrupts */
115 writel(PPC4XX_INTERRUPT_CLR, dev->ce_base + CRYPTO4XX_INT_CLR);
116 writel(PPC4XX_INT_DESCR_CNT, dev->ce_base + CRYPTO4XX_INT_DESCR_CNT);
117 writel(PPC4XX_INT_DESCR_CNT, dev->ce_base + CRYPTO4XX_INT_DESCR_CNT);
118 writel(PPC4XX_INT_CFG, dev->ce_base + CRYPTO4XX_INT_CFG);
119 writel(PPC4XX_PD_DONE_INT, dev->ce_base + CRYPTO4XX_INT_EN);
120}
121
122int crypto4xx_alloc_sa(struct crypto4xx_ctx *ctx, u32 size)
123{
124 ctx->sa_in = dma_alloc_coherent(ctx->dev->core_dev->device, size * 4,
125 &ctx->sa_in_dma_addr, GFP_ATOMIC);
126 if (ctx->sa_in == NULL)
127 return -ENOMEM;
128
129 ctx->sa_out = dma_alloc_coherent(ctx->dev->core_dev->device, size * 4,
130 &ctx->sa_out_dma_addr, GFP_ATOMIC);
131 if (ctx->sa_out == NULL) {
132 dma_free_coherent(ctx->dev->core_dev->device,
133 ctx->sa_len * 4,
134 ctx->sa_in, ctx->sa_in_dma_addr);
135 return -ENOMEM;
136 }
137
138 memset(ctx->sa_in, 0, size * 4);
139 memset(ctx->sa_out, 0, size * 4);
140 ctx->sa_len = size;
141
142 return 0;
143}
144
145void crypto4xx_free_sa(struct crypto4xx_ctx *ctx)
146{
147 if (ctx->sa_in != NULL)
148 dma_free_coherent(ctx->dev->core_dev->device, ctx->sa_len * 4,
149 ctx->sa_in, ctx->sa_in_dma_addr);
150 if (ctx->sa_out != NULL)
151 dma_free_coherent(ctx->dev->core_dev->device, ctx->sa_len * 4,
152 ctx->sa_out, ctx->sa_out_dma_addr);
153
154 ctx->sa_in_dma_addr = 0;
155 ctx->sa_out_dma_addr = 0;
156 ctx->sa_len = 0;
157}
158
159u32 crypto4xx_alloc_state_record(struct crypto4xx_ctx *ctx)
160{
161 ctx->state_record = dma_alloc_coherent(ctx->dev->core_dev->device,
162 sizeof(struct sa_state_record),
163 &ctx->state_record_dma_addr, GFP_ATOMIC);
164 if (!ctx->state_record_dma_addr)
165 return -ENOMEM;
166 memset(ctx->state_record, 0, sizeof(struct sa_state_record));
167
168 return 0;
169}
170
171void crypto4xx_free_state_record(struct crypto4xx_ctx *ctx)
172{
173 if (ctx->state_record != NULL)
174 dma_free_coherent(ctx->dev->core_dev->device,
175 sizeof(struct sa_state_record),
176 ctx->state_record,
177 ctx->state_record_dma_addr);
178 ctx->state_record_dma_addr = 0;
179}
180
181/**
182 * alloc memory for the packet descriptor ring
183 * no need to alloc buf for the ring
184 * pdr_tail and pdr_head are initialized by this function
185 */
186static u32 crypto4xx_build_pdr(struct crypto4xx_device *dev)
187{
188 int i;
189 struct pd_uinfo *pd_uinfo;
190 dev->pdr = dma_alloc_coherent(dev->core_dev->device,
191 sizeof(struct ce_pd) * PPC4XX_NUM_PD,
192 &dev->pdr_pa, GFP_ATOMIC);
193 if (!dev->pdr)
194 return -ENOMEM;
195
196 dev->pdr_uinfo = kzalloc(sizeof(struct pd_uinfo) * PPC4XX_NUM_PD,
197 GFP_KERNEL);
198 if (!dev->pdr_uinfo) {
199 dma_free_coherent(dev->core_dev->device,
200 sizeof(struct ce_pd) * PPC4XX_NUM_PD,
201 dev->pdr,
202 dev->pdr_pa);
203 return -ENOMEM;
204 }
205 memset(dev->pdr, 0, sizeof(struct ce_pd) * PPC4XX_NUM_PD);
206 dev->shadow_sa_pool = dma_alloc_coherent(dev->core_dev->device,
207 256 * PPC4XX_NUM_PD,
208 &dev->shadow_sa_pool_pa,
209 GFP_ATOMIC);
210 if (!dev->shadow_sa_pool)
211 return -ENOMEM;
212
213 dev->shadow_sr_pool = dma_alloc_coherent(dev->core_dev->device,
214 sizeof(struct sa_state_record) * PPC4XX_NUM_PD,
215 &dev->shadow_sr_pool_pa, GFP_ATOMIC);
216 if (!dev->shadow_sr_pool)
217 return -ENOMEM;
218 for (i = 0; i < PPC4XX_NUM_PD; i++) {
219 pd_uinfo = (struct pd_uinfo *) (dev->pdr_uinfo +
220 sizeof(struct pd_uinfo) * i);
221
222 /* alloc 256 bytes which is enough for any kind of dynamic sa */
223 pd_uinfo->sa_va = dev->shadow_sa_pool + 256 * i;
224 pd_uinfo->sa_pa = dev->shadow_sa_pool_pa + 256 * i;
225
226 /* alloc state record */
227 pd_uinfo->sr_va = dev->shadow_sr_pool +
228 sizeof(struct sa_state_record) * i;
229 pd_uinfo->sr_pa = dev->shadow_sr_pool_pa +
230 sizeof(struct sa_state_record) * i;
231 }
232
233 return 0;
234}
235
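The shadow pools allocated above give every PD slot private SA and state-record space; with the defaults from crypto4xx_core.h the layout works out as follows (illustrative arithmetic only):

/* With PPC4XX_NUM_PD == 64:
 *   shadow SA pool: 64 slots * 256 bytes = 16 KiB; slot i at offset 256 * i.
 *   shadow SR pool: 64 * sizeof(struct sa_state_record); slot i at
 *                   offset sizeof(struct sa_state_record) * i.
 * 256 bytes is enough for any dynamic SA this engine uses, per the
 * comment in the loop above. */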
236static void crypto4xx_destroy_pdr(struct crypto4xx_device *dev)
237{
238 if (dev->pdr != NULL)
239 dma_free_coherent(dev->core_dev->device,
240 sizeof(struct ce_pd) * PPC4XX_NUM_PD,
241 dev->pdr, dev->pdr_pa);
242 if (dev->shadow_sa_pool)
243 dma_free_coherent(dev->core_dev->device, 256 * PPC4XX_NUM_PD,
244 dev->shadow_sa_pool, dev->shadow_sa_pool_pa);
245 if (dev->shadow_sr_pool)
246 dma_free_coherent(dev->core_dev->device,
247 sizeof(struct sa_state_record) * PPC4XX_NUM_PD,
248 dev->shadow_sr_pool, dev->shadow_sr_pool_pa);
249
250 kfree(dev->pdr_uinfo);
251}
252
253static u32 crypto4xx_get_pd_from_pdr_nolock(struct crypto4xx_device *dev)
254{
255 u32 retval;
256 u32 tmp;
257
258 retval = dev->pdr_head;
259 tmp = (dev->pdr_head + 1) % PPC4XX_NUM_PD;
260
261 if (tmp == dev->pdr_tail)
262 return ERING_WAS_FULL;
263
264 dev->pdr_head = tmp;
265
266 return retval;
267}
268
269static u32 crypto4xx_put_pd_to_pdr(struct crypto4xx_device *dev, u32 idx)
270{
271 struct pd_uinfo *pd_uinfo;
272 unsigned long flags;
273
274 pd_uinfo = (struct pd_uinfo *)(dev->pdr_uinfo +
275 sizeof(struct pd_uinfo) * idx);
276 spin_lock_irqsave(&dev->core_dev->lock, flags);
277 if (dev->pdr_tail != PPC4XX_LAST_PD)
278 dev->pdr_tail++;
279 else
280 dev->pdr_tail = 0;
281 pd_uinfo->state = PD_ENTRY_FREE;
282 spin_unlock_irqrestore(&dev->core_dev->lock, flags);
283
284 return 0;
285}
286
287static struct ce_pd *crypto4xx_get_pdp(struct crypto4xx_device *dev,
288 dma_addr_t *pd_dma, u32 idx)
289{
290 *pd_dma = dev->pdr_pa + sizeof(struct ce_pd) * idx;
291
292 return dev->pdr + sizeof(struct ce_pd) * idx;
293}
294
295/**
296 * alloc memory for the gather ring
297 * no need to alloc buf for the ring
298 * gdr_tail and gdr_head are initialized by this function
299 */
300static u32 crypto4xx_build_gdr(struct crypto4xx_device *dev)
301{
302 dev->gdr = dma_alloc_coherent(dev->core_dev->device,
303 sizeof(struct ce_gd) * PPC4XX_NUM_GD,
304 &dev->gdr_pa, GFP_ATOMIC);
305 if (!dev->gdr)
306 return -ENOMEM;
307
308 memset(dev->gdr, 0, sizeof(struct ce_gd) * PPC4XX_NUM_GD);
309
310 return 0;
311}
312
313static inline void crypto4xx_destroy_gdr(struct crypto4xx_device *dev)
314{
315 dma_free_coherent(dev->core_dev->device,
316 sizeof(struct ce_gd) * PPC4XX_NUM_GD,
317 dev->gdr, dev->gdr_pa);
318}
319
320/*
321 * When this function is called, preemption or interrupts
322 * must be disabled.
323 */
324u32 crypto4xx_get_n_gd(struct crypto4xx_device *dev, int n)
325{
326 u32 retval;
327 u32 tmp;
328 if (n >= PPC4XX_NUM_GD)
329 return ERING_WAS_FULL;
330
331 retval = dev->gdr_head;
332 tmp = (dev->gdr_head + n) % PPC4XX_NUM_GD;
333 if (dev->gdr_head > dev->gdr_tail) {
334 if (tmp < dev->gdr_head && tmp >= dev->gdr_tail)
335 return ERING_WAS_FULL;
336 } else if (dev->gdr_head < dev->gdr_tail) {
337 if (tmp < dev->gdr_head || tmp >= dev->gdr_tail)
338 return ERING_WAS_FULL;
339 }
340 dev->gdr_head = tmp;
341
342 return retval;
343}
344
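The wraparound fullness test above is easier to verify with concrete numbers. Below is a standalone, userspace-testable sketch of the same invariant (hypothetical 8-entry ring; an editorial example, not driver code):

#include <assert.h>

#define RING_SIZE 8

/* head is the next free slot, tail the oldest in-flight slot.
 * Returns the first of n consecutive slots, or -1 when the claim
 * would overrun the tail -- the same check crypto4xx_get_n_gd()
 * performs with ERING_WAS_FULL. */
static int ring_get_n(unsigned int *head, unsigned int tail, int n)
{
	unsigned int first = *head;
	unsigned int next = (*head + n) % RING_SIZE;

	if (*head > tail) {
		if (next < *head && next >= tail)
			return -1;
	} else if (*head < tail) {
		if (next < *head || next >= tail)
			return -1;
	}
	*head = next;
	return first;
}

int main(void)
{
	unsigned int head = 6, tail = 2;

	assert(ring_get_n(&head, tail, 3) == 6);  /* claims 6, 7, 0; head wraps to 1 */
	assert(head == 1);
	assert(ring_get_n(&head, tail, 2) == -1); /* slot 2 is still in flight */
	return 0;
}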
345static u32 crypto4xx_put_gd_to_gdr(struct crypto4xx_device *dev)
346{
347 unsigned long flags;
348
349 spin_lock_irqsave(&dev->core_dev->lock, flags);
350 if (dev->gdr_tail == dev->gdr_head) {
351 spin_unlock_irqrestore(&dev->core_dev->lock, flags);
352 return 0;
353 }
354
355 if (dev->gdr_tail != PPC4XX_LAST_GD)
356 dev->gdr_tail++;
357 else
358 dev->gdr_tail = 0;
359
360 spin_unlock_irqrestore(&dev->core_dev->lock, flags);
361
362 return 0;
363}
364
365static inline struct ce_gd *crypto4xx_get_gdp(struct crypto4xx_device *dev,
366 dma_addr_t *gd_dma, u32 idx)
367{
368 *gd_dma = dev->gdr_pa + sizeof(struct ce_gd) * idx;
369
370 return (struct ce_gd *) (dev->gdr + sizeof(struct ce_gd) * idx);
371}
372
373/**
374 * alloc memory for the scatter ring
375 * need to alloc buf for the ring
376 * sdr_tail and sdr_head are initialized by this function
377 */
378static u32 crypto4xx_build_sdr(struct crypto4xx_device *dev)
379{
380 int i;
381 struct ce_sd *sd_array;
382
383 /* alloc memory for scatter descriptor ring */
384 dev->sdr = dma_alloc_coherent(dev->core_dev->device,
385 sizeof(struct ce_sd) * PPC4XX_NUM_SD,
386 &dev->sdr_pa, GFP_ATOMIC);
387 if (!dev->sdr)
388 return -ENOMEM;
389
390 dev->scatter_buffer_size = PPC4XX_SD_BUFFER_SIZE;
391 dev->scatter_buffer_va =
392 dma_alloc_coherent(dev->core_dev->device,
393 dev->scatter_buffer_size * PPC4XX_NUM_SD,
394 &dev->scatter_buffer_pa, GFP_ATOMIC);
395 if (!dev->scatter_buffer_va) {
396 dma_free_coherent(dev->core_dev->device,
397 sizeof(struct ce_sd) * PPC4XX_NUM_SD,
398 dev->sdr, dev->sdr_pa);
399 return -ENOMEM;
400 }
401
402 sd_array = dev->sdr;
403
404 for (i = 0; i < PPC4XX_NUM_SD; i++) {
405 sd_array[i].ptr = dev->scatter_buffer_pa +
406 dev->scatter_buffer_size * i;
407 }
408
409 return 0;
410}
411
412static void crypto4xx_destroy_sdr(struct crypto4xx_device *dev)
413{
414 if (dev->sdr != NULL)
415 dma_free_coherent(dev->core_dev->device,
416 sizeof(struct ce_sd) * PPC4XX_NUM_SD,
417 dev->sdr, dev->sdr_pa);
418
419 if (dev->scatter_buffer_va != NULL)
420 dma_free_coherent(dev->core_dev->device,
421 dev->scatter_buffer_size * PPC4XX_NUM_SD,
422 dev->scatter_buffer_va,
423 dev->scatter_buffer_pa);
424}
425
426/*
427 * When this function is called, preemption or interrupts
428 * must be disabled.
429 */
430static u32 crypto4xx_get_n_sd(struct crypto4xx_device *dev, int n)
431{
432 u32 retval;
433 u32 tmp;
434
435 if (n >= PPC4XX_NUM_SD)
436 return ERING_WAS_FULL;
437
438 retval = dev->sdr_head;
439 tmp = (dev->sdr_head + n) % PPC4XX_NUM_SD;
440 if (dev->sdr_head > dev->sdr_tail) {
441 if (tmp < dev->sdr_head && tmp >= dev->sdr_tail)
442 return ERING_WAS_FULL;
443 } else if (dev->sdr_head < dev->sdr_tail) {
444 if (tmp < dev->sdr_head || tmp >= dev->sdr_tail)
445 return ERING_WAS_FULL;
446 } /* the head == tail (empty) case is already taken care of */
447 dev->sdr_head = tmp;
448
449 return retval;
450}
451
452static u32 crypto4xx_put_sd_to_sdr(struct crypto4xx_device *dev)
453{
454 unsigned long flags;
455
456 spin_lock_irqsave(&dev->core_dev->lock, flags);
457 if (dev->sdr_tail == dev->sdr_head) {
458 spin_unlock_irqrestore(&dev->core_dev->lock, flags);
459 return 0;
460 }
461 if (dev->sdr_tail != PPC4XX_LAST_SD)
462 dev->sdr_tail++;
463 else
464 dev->sdr_tail = 0;
465 spin_unlock_irqrestore(&dev->core_dev->lock, flags);
466
467 return 0;
468}
469
470static inline struct ce_sd *crypto4xx_get_sdp(struct crypto4xx_device *dev,
471 dma_addr_t *sd_dma, u32 idx)
472{
473 *sd_dma = dev->sdr_pa + sizeof(struct ce_sd) * idx;
474
475 return (struct ce_sd *)(dev->sdr + sizeof(struct ce_sd) * idx);
476}
477
478static u32 crypto4xx_fill_one_page(struct crypto4xx_device *dev,
479 dma_addr_t *addr, u32 *length,
480 u32 *idx, u32 *offset, u32 *nbytes)
481{
482 u32 len;
483
484 if (*length > dev->scatter_buffer_size) {
485 memcpy(phys_to_virt(*addr),
486 dev->scatter_buffer_va +
487 *idx * dev->scatter_buffer_size + *offset,
488 dev->scatter_buffer_size);
489 *offset = 0;
490 *length -= dev->scatter_buffer_size;
491 *nbytes -= dev->scatter_buffer_size;
492 if (*idx == PPC4XX_LAST_SD)
493 *idx = 0;
494 else
495 (*idx)++;
496 *addr = *addr + dev->scatter_buffer_size;
497 return 1;
498 } else if (*length < dev->scatter_buffer_size) {
499 memcpy(phys_to_virt(*addr),
500 dev->scatter_buffer_va +
501 *idx * dev->scatter_buffer_size + *offset, *length);
502 if ((*offset + *length) == dev->scatter_buffer_size) {
503 if (*idx == PPC4XX_LAST_SD)
504 *idx = 0;
505 else
506 (*idx)++;
507 *nbytes -= *length;
508 *offset = 0;
509 } else {
510 *nbytes -= *length;
511 *offset += *length;
512 }
513
514 return 0;
515 } else {
516 len = (*nbytes <= dev->scatter_buffer_size) ?
517 (*nbytes) : dev->scatter_buffer_size;
518 memcpy(phys_to_virt(*addr),
519 dev->scatter_buffer_va +
520 *idx * dev->scatter_buffer_size + *offset,
521 len);
522 *offset = 0;
523 *nbytes -= len;
524
525 if (*idx == PPC4XX_LAST_SD)
526 *idx = 0;
527 else
528 (*idx)++;
529
530 return 0;
531 }
532}
533
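crypto4xx_fill_one_page() is driven in a loop by its caller until it returns 0; a walk-through with the default 2048-byte scatter buffers (illustration only):

/* Draining a 5000-byte destination segment (scatter_buffer_size == 2048):
 *   call 1: *length = 5000 > 2048 -> copy 2048, advance *idx, return 1
 *   call 2: *length = 2952 > 2048 -> copy 2048, advance *idx, return 1
 *   call 3: *length =  904 < 2048 -> copy 904, record *offset = 904, return 0
 * The caller's while (...) ; loop therefore spins exactly until the
 * segment is fully drained. */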
534static void crypto4xx_copy_pkt_to_dst(struct crypto4xx_device *dev,
535 struct ce_pd *pd,
536 struct pd_uinfo *pd_uinfo,
537 u32 nbytes,
538 struct scatterlist *dst)
539{
540 dma_addr_t addr;
541 u32 this_sd;
542 u32 offset;
543 u32 len;
544 u32 i;
545 u32 sg_len;
546 struct scatterlist *sg;
547
548 this_sd = pd_uinfo->first_sd;
549 offset = 0;
550 i = 0;
551
552 while (nbytes) {
553 sg = &dst[i];
554 sg_len = sg->length;
555 addr = dma_map_page(dev->core_dev->device, sg_page(sg),
556 sg->offset, sg->length, DMA_TO_DEVICE);
557
558 if (offset == 0) {
559 len = (nbytes <= sg->length) ? nbytes : sg->length;
560 while (crypto4xx_fill_one_page(dev, &addr, &len,
561 &this_sd, &offset, &nbytes))
562 ;
563 if (!nbytes)
564 return;
565 i++;
566 } else {
567 len = (nbytes <= (dev->scatter_buffer_size - offset)) ?
568 nbytes : (dev->scatter_buffer_size - offset);
569 len = (sg->length < len) ? sg->length : len;
570 while (crypto4xx_fill_one_page(dev, &addr, &len,
571 &this_sd, &offset, &nbytes))
572 ;
573 if (!nbytes)
574 return;
575 sg_len -= len;
576 if (sg_len) {
577 addr += len;
578 while (crypto4xx_fill_one_page(dev, &addr,
579 &sg_len, &this_sd, &offset, &nbytes))
580 ;
581 }
582 i++;
583 }
584 }
585}
586
587static u32 crypto4xx_copy_digest_to_dst(struct pd_uinfo *pd_uinfo,
588 struct crypto4xx_ctx *ctx)
589{
590 struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *) ctx->sa_in;
591 struct sa_state_record *state_record =
592 (struct sa_state_record *) pd_uinfo->sr_va;
593
594 if (sa->sa_command_0.bf.hash_alg == SA_HASH_ALG_SHA1) {
595 memcpy((void *) pd_uinfo->dest_va, state_record->save_digest,
596 SA_HASH_ALG_SHA1_DIGEST_SIZE);
597 }
598
599 return 0;
600}
601
602static void crypto4xx_ret_sg_desc(struct crypto4xx_device *dev,
603 struct pd_uinfo *pd_uinfo)
604{
605 int i;
606 if (pd_uinfo->num_gd) {
607 for (i = 0; i < pd_uinfo->num_gd; i++)
608 crypto4xx_put_gd_to_gdr(dev);
609 pd_uinfo->first_gd = 0xffffffff;
610 pd_uinfo->num_gd = 0;
611 }
612 if (pd_uinfo->num_sd) {
613 for (i = 0; i < pd_uinfo->num_sd; i++)
614 crypto4xx_put_sd_to_sdr(dev);
615
616 pd_uinfo->first_sd = 0xffffffff;
617 pd_uinfo->num_sd = 0;
618 }
619}
620
621static u32 crypto4xx_ablkcipher_done(struct crypto4xx_device *dev,
622 struct pd_uinfo *pd_uinfo,
623 struct ce_pd *pd)
624{
625 struct crypto4xx_ctx *ctx;
626 struct ablkcipher_request *ablk_req;
627 struct scatterlist *dst;
628 dma_addr_t addr;
629
630 ablk_req = ablkcipher_request_cast(pd_uinfo->async_req);
631 ctx = crypto_tfm_ctx(ablk_req->base.tfm);
632
633 if (pd_uinfo->using_sd) {
634 crypto4xx_copy_pkt_to_dst(dev, pd, pd_uinfo, ablk_req->nbytes,
635 ablk_req->dst);
636 } else {
637 dst = pd_uinfo->dest_va;
638 addr = dma_map_page(dev->core_dev->device, sg_page(dst),
639 dst->offset, dst->length, DMA_FROM_DEVICE);
640 }
641 crypto4xx_ret_sg_desc(dev, pd_uinfo);
642 if (ablk_req->base.complete != NULL)
643 ablk_req->base.complete(&ablk_req->base, 0);
644
645 return 0;
646}
647
648static u32 crypto4xx_ahash_done(struct crypto4xx_device *dev,
649 struct pd_uinfo *pd_uinfo)
650{
651 struct crypto4xx_ctx *ctx;
652 struct ahash_request *ahash_req;
653
654 ahash_req = ahash_request_cast(pd_uinfo->async_req);
655 ctx = crypto_tfm_ctx(ahash_req->base.tfm);
656
657 crypto4xx_copy_digest_to_dst(pd_uinfo,
658 crypto_tfm_ctx(ahash_req->base.tfm));
659 crypto4xx_ret_sg_desc(dev, pd_uinfo);
660 /* call the user-provided callback function */
661 if (ahash_req->base.complete != NULL)
662 ahash_req->base.complete(&ahash_req->base, 0);
663
664 return 0;
665}
666
667static u32 crypto4xx_pd_done(struct crypto4xx_device *dev, u32 idx)
668{
669 struct ce_pd *pd;
670 struct pd_uinfo *pd_uinfo;
671
672 pd = dev->pdr + sizeof(struct ce_pd)*idx;
673 pd_uinfo = dev->pdr_uinfo + sizeof(struct pd_uinfo)*idx;
674 if (crypto_tfm_alg_type(pd_uinfo->async_req->tfm) ==
675 CRYPTO_ALG_TYPE_ABLKCIPHER)
676 return crypto4xx_ablkcipher_done(dev, pd_uinfo, pd);
677 else
678 return crypto4xx_ahash_done(dev, pd_uinfo);
679}
680
681/**
682 * Note: Only use this function to copy items that are word aligned.
683 */
684void crypto4xx_memcpy_le(unsigned int *dst,
685 const unsigned char *buf,
686 int len)
687{
688 u8 *tmp;
689 for (; len >= 4; buf += 4, len -= 4)
690 *dst++ = cpu_to_le32(*(unsigned int *) buf);
691
692 tmp = (u8 *)dst;
693 switch (len) {
694 case 3:
695 *tmp++ = 0;
696 *tmp++ = *(buf+2);
697 *tmp++ = *(buf+1);
698 *tmp++ = *buf;
699 break;
700 case 2:
701 *tmp++ = 0;
702 *tmp++ = 0;
703 *tmp++ = *(buf+1);
704 *tmp++ = *buf;
705 break;
706 case 1:
707 *tmp++ = 0;
708 *tmp++ = 0;
709 *tmp++ = 0;
710 *tmp++ = *buf;
711 break;
712 default:
713 break;
714 }
715}
716
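The tail handling above byte-reverses each 32-bit word; a worked example on big-endian PPC4xx (illustration only):

/* crypto4xx_memcpy_le(dst, buf, 7) with
 *   buf = { 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77 }
 * word 0: cpu_to_le32(0x11223344) -> dst bytes 44 33 22 11
 * tail (len == 3 case)            -> dst bytes 00 77 66 55
 * i.e. each destination word holds the source bytes reversed, and the
 * partial last word is zero-padded at its most-significant end. */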
717static void crypto4xx_stop_all(struct crypto4xx_core_device *core_dev)
718{
719 crypto4xx_destroy_pdr(core_dev->dev);
720 crypto4xx_destroy_gdr(core_dev->dev);
721 crypto4xx_destroy_sdr(core_dev->dev);
722 dev_set_drvdata(core_dev->device, NULL);
723 iounmap(core_dev->dev->ce_base);
724 kfree(core_dev->dev);
725 kfree(core_dev);
726}
727
728void crypto4xx_return_pd(struct crypto4xx_device *dev,
729 u32 pd_entry, struct ce_pd *pd,
730 struct pd_uinfo *pd_uinfo)
731{
732 /* IRQs should already be disabled */
733 dev->pdr_head = pd_entry;
734 pd->pd_ctl.w = 0;
735 pd->pd_ctl_len.w = 0;
736 pd_uinfo->state = PD_ENTRY_FREE;
737}
738
739/*
740 * Derive the number of elements in a scatterlist.
741 * Shamelessly copied from talitos.c.
742 */
743static int get_sg_count(struct scatterlist *sg_list, int nbytes)
744{
745 struct scatterlist *sg = sg_list;
746 int sg_nents = 0;
747
748 while (nbytes) {
749 sg_nents++;
750 if (sg->length > nbytes)
751 break;
752 nbytes -= sg->length;
753 sg = sg_next(sg);
754 }
755
756 return sg_nents;
757}
758
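Note that get_sg_count() above counts the entry that crosses the end of the payload and then stops; for example:

/* nbytes = 100 over scatterlist entries of length 64, 64, 64:
 *   entry 0: 64 <= 100 -> consumed, nbytes = 36
 *   entry 1: length 64 > 36 -> counted, then break
 * => sg_nents == 2 (the third entry is never visited). */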
759static u32 get_next_gd(u32 val)
760{
761 if (val != PPC4XX_LAST_GD)
762 return val + 1;
763 else
764 return 0;
765}
766
767static u32 get_next_sd(u32 val)
768{
769 if (val != PPC4XX_LAST_SD)
770 return val + 1;
771 else
772 return 0;
773}
774
775u32 crypto4xx_build_pd(struct crypto_async_request *req,
776 struct crypto4xx_ctx *ctx,
777 struct scatterlist *src,
778 struct scatterlist *dst,
779 unsigned int datalen,
780 void *iv, u32 iv_len)
781{
782 struct crypto4xx_device *dev = ctx->dev;
783 dma_addr_t addr, pd_dma, sd_dma, gd_dma;
784 struct dynamic_sa_ctl *sa;
785 struct scatterlist *sg;
786 struct ce_gd *gd;
787 struct ce_pd *pd;
788 u32 num_gd, num_sd;
789 u32 fst_gd = 0xffffffff;
790 u32 fst_sd = 0xffffffff;
791 u32 pd_entry;
792 unsigned long flags;
793 struct pd_uinfo *pd_uinfo = NULL;
794 unsigned int nbytes = datalen, idx;
795 unsigned int ivlen = 0;
796 u32 gd_idx = 0;
797
798 /* figure how many gd is needed */
799 num_gd = get_sg_count(src, datalen);
800 if (num_gd == 1)
801 num_gd = 0;
802
803 /* figure how many sd is needed */
804 if (sg_is_last(dst) || ctx->is_hash) {
805 num_sd = 0;
806 } else {
807 if (datalen > PPC4XX_SD_BUFFER_SIZE) {
808 num_sd = datalen / PPC4XX_SD_BUFFER_SIZE;
809 if (datalen % PPC4XX_SD_BUFFER_SIZE)
810 num_sd++;
811 } else {
812 num_sd = 1;
813 }
814 }
815
816 /*
817 * The following section of code must be protected by the ring lock.
818 * The gather and scatter descriptors claimed for one packet must be
819 * consecutive. If we run out of any kind of descriptor, the ones
820 * already claimed must be returned to their original place.
821 */
822 spin_lock_irqsave(&dev->core_dev->lock, flags);
823 if (num_gd) {
824 fst_gd = crypto4xx_get_n_gd(dev, num_gd);
825 if (fst_gd == ERING_WAS_FULL) {
826 spin_unlock_irqrestore(&dev->core_dev->lock, flags);
827 return -EAGAIN;
828 }
829 }
830 if (num_sd) {
831 fst_sd = crypto4xx_get_n_sd(dev, num_sd);
832 if (fst_sd == ERING_WAS_FULL) {
833 if (num_gd)
834 dev->gdr_head = fst_gd;
835 spin_unlock_irqrestore(&dev->core_dev->lock, flags);
836 return -EAGAIN;
837 }
838 }
839 pd_entry = crypto4xx_get_pd_from_pdr_nolock(dev);
840 if (pd_entry == ERING_WAS_FULL) {
841 if (num_gd)
842 dev->gdr_head = fst_gd;
843 if (num_sd)
844 dev->sdr_head = fst_sd;
845 spin_unlock_irqrestore(&dev->core_dev->lock, flags);
846 return -EAGAIN;
847 }
848 spin_unlock_irqrestore(&dev->core_dev->lock, flags);
849
850 pd_uinfo = (struct pd_uinfo *)(dev->pdr_uinfo +
851 sizeof(struct pd_uinfo) * pd_entry);
852 pd = crypto4xx_get_pdp(dev, &pd_dma, pd_entry);
853 pd_uinfo->async_req = req;
854 pd_uinfo->num_gd = num_gd;
855 pd_uinfo->num_sd = num_sd;
856
857 if (iv_len || ctx->is_hash) {
858 ivlen = iv_len;
859 pd->sa = pd_uinfo->sa_pa;
860 sa = (struct dynamic_sa_ctl *) pd_uinfo->sa_va;
861 if (ctx->direction == DIR_INBOUND)
862 memcpy(sa, ctx->sa_in, ctx->sa_len * 4);
863 else
864 memcpy(sa, ctx->sa_out, ctx->sa_len * 4);
865
866 memcpy((void *) sa + ctx->offset_to_sr_ptr,
867 &pd_uinfo->sr_pa, 4);
868
869 if (iv_len)
870 crypto4xx_memcpy_le(pd_uinfo->sr_va, iv, iv_len);
871 } else {
872 if (ctx->direction == DIR_INBOUND) {
873 pd->sa = ctx->sa_in_dma_addr;
874 sa = (struct dynamic_sa_ctl *) ctx->sa_in;
875 } else {
876 pd->sa = ctx->sa_out_dma_addr;
877 sa = (struct dynamic_sa_ctl *) ctx->sa_out;
878 }
879 }
880 pd->sa_len = ctx->sa_len;
881 if (num_gd) {
882 /* get first gd we are going to use */
883 gd_idx = fst_gd;
884 pd_uinfo->first_gd = fst_gd;
885 pd_uinfo->num_gd = num_gd;
886 gd = crypto4xx_get_gdp(dev, &gd_dma, gd_idx);
887 pd->src = gd_dma;
888 /* enable gather */
889 sa->sa_command_0.bf.gather = 1;
890 idx = 0;
891 src = &src[0];
892 /* walk the sg, and setup gather array */
893 while (nbytes) {
894 sg = &src[idx];
895 addr = dma_map_page(dev->core_dev->device, sg_page(sg),
896 sg->offset, sg->length, DMA_TO_DEVICE);
897 gd->ptr = addr;
898 gd->ctl_len.len = sg->length;
899 gd->ctl_len.done = 0;
900 gd->ctl_len.ready = 1;
901 if (sg->length >= nbytes)
902 break;
903 nbytes -= sg->length;
904 gd_idx = get_next_gd(gd_idx);
905 gd = crypto4xx_get_gdp(dev, &gd_dma, gd_idx);
906 idx++;
907 }
908 } else {
909 pd->src = (u32)dma_map_page(dev->core_dev->device, sg_page(src),
910 src->offset, src->length, DMA_TO_DEVICE);
911 /*
912 * Disable gather in sa command
913 */
914 sa->sa_command_0.bf.gather = 0;
915 /*
916 * Indicate gather array is not used
917 */
918 pd_uinfo->first_gd = 0xffffffff;
919 pd_uinfo->num_gd = 0;
920 }
921 if (ctx->is_hash || sg_is_last(dst)) {
922 /*
923 * The application gave us a dst that is a single contiguous
924 * piece of memory, so there is no need to use the scatter ring.
925 * In the is_hash case, the ICV is always at the end of the src data.
926 */
927 pd_uinfo->using_sd = 0;
928 pd_uinfo->first_sd = 0xffffffff;
929 pd_uinfo->num_sd = 0;
930 pd_uinfo->dest_va = dst;
931 sa->sa_command_0.bf.scatter = 0;
932 if (ctx->is_hash)
933 pd->dest = virt_to_phys((void *)dst);
934 else
935 pd->dest = (u32)dma_map_page(dev->core_dev->device,
936 sg_page(dst), dst->offset,
937 dst->length, DMA_TO_DEVICE);
938 } else {
939 struct ce_sd *sd = NULL;
940 u32 sd_idx = fst_sd;
941 nbytes = datalen;
942 sa->sa_command_0.bf.scatter = 1;
943 pd_uinfo->using_sd = 1;
944 pd_uinfo->dest_va = dst;
945 pd_uinfo->first_sd = fst_sd;
946 pd_uinfo->num_sd = num_sd;
947 sd = crypto4xx_get_sdp(dev, &sd_dma, sd_idx);
948 pd->dest = sd_dma;
949 /* setup scatter descriptor */
950 sd->ctl.done = 0;
951 sd->ctl.rdy = 1;
952 /* sd->ptr should be set up by the sd_init routine */
953 idx = 0;
954 if (nbytes >= PPC4XX_SD_BUFFER_SIZE)
955 nbytes -= PPC4XX_SD_BUFFER_SIZE;
956 else
957 nbytes = 0;
958 while (nbytes) {
959 sd_idx = get_next_sd(sd_idx);
960 sd = crypto4xx_get_sdp(dev, &sd_dma, sd_idx);
961 /* setup scatter descriptor */
962 sd->ctl.done = 0;
963 sd->ctl.rdy = 1;
964 if (nbytes >= PPC4XX_SD_BUFFER_SIZE)
965 nbytes -= PPC4XX_SD_BUFFER_SIZE;
966 else
967 /*
968 * SD entry can hold PPC4XX_SD_BUFFER_SIZE,
969 * which is more than nbytes, so done.
970 */
971 nbytes = 0;
972 }
973 }
974
975 sa->sa_command_1.bf.hash_crypto_offset = 0;
976 pd->pd_ctl.w = ctx->pd_ctl;
977 pd->pd_ctl_len.w = 0x00400000 | (ctx->bypass << 24) | datalen;
978 pd_uinfo->state = PD_ENTRY_INUSE;
979 wmb();
980 /* write any value to push engine to read a pd */
981 writel(1, dev->ce_base + CRYPTO4XX_INT_DESCR_RD);
982 return -EINPROGRESS;
983}
984
985/**
986 * Algorithm Registration Functions
987 */
988static int crypto4xx_alg_init(struct crypto_tfm *tfm)
989{
990 struct crypto_alg *alg = tfm->__crt_alg;
991 struct crypto4xx_alg *amcc_alg = crypto_alg_to_crypto4xx_alg(alg);
992 struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
993
994 ctx->dev = amcc_alg->dev;
995 ctx->sa_in = NULL;
996 ctx->sa_out = NULL;
997 ctx->sa_in_dma_addr = 0;
998 ctx->sa_out_dma_addr = 0;
999 ctx->sa_len = 0;
1000
1001 if (alg->cra_type == &crypto_ablkcipher_type)
1002 tfm->crt_ablkcipher.reqsize = sizeof(struct crypto4xx_ctx);
1003 else if (alg->cra_type == &crypto_ahash_type)
1004 tfm->crt_ahash.reqsize = sizeof(struct crypto4xx_ctx);
1005
1006 return 0;
1007}
1008
1009static void crypto4xx_alg_exit(struct crypto_tfm *tfm)
1010{
1011 struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
1012
1013 crypto4xx_free_sa(ctx);
1014 crypto4xx_free_state_record(ctx);
1015}
1016
1017int crypto4xx_register_alg(struct crypto4xx_device *sec_dev,
1018 struct crypto_alg *crypto_alg, int array_size)
1019{
1020 struct crypto4xx_alg *alg;
1021 int i;
1022 int rc = 0;
1023
1024 for (i = 0; i < array_size; i++) {
1025 alg = kzalloc(sizeof(struct crypto4xx_alg), GFP_KERNEL);
1026 if (!alg)
1027 return -ENOMEM;
1028
1029 alg->alg = crypto_alg[i];
1030 INIT_LIST_HEAD(&alg->alg.cra_list);
1031 if (alg->alg.cra_init == NULL)
1032 alg->alg.cra_init = crypto4xx_alg_init;
1033 if (alg->alg.cra_exit == NULL)
1034 alg->alg.cra_exit = crypto4xx_alg_exit;
1035 alg->dev = sec_dev;
1036 rc = crypto_register_alg(&alg->alg);
1037 if (rc) {
1038 /* not yet on alg_list, nothing to unlink */
1039 kfree(alg);
1040 } else {
1041 list_add_tail(&alg->entry, &sec_dev->alg_list);
1042 }
1043 }
1044
1045 return 0;
1046}
1047
1048static void crypto4xx_unregister_alg(struct crypto4xx_device *sec_dev)
1049{
1050 struct crypto4xx_alg *alg, *tmp;
1051
1052 list_for_each_entry_safe(alg, tmp, &sec_dev->alg_list, entry) {
1053 list_del(&alg->entry);
1054 crypto_unregister_alg(&alg->alg);
1055 kfree(alg);
1056 }
1057}
1058
1059static void crypto4xx_bh_tasklet_cb(unsigned long data)
1060{
1061 struct device *dev = (struct device *)data;
1062 struct crypto4xx_core_device *core_dev = dev_get_drvdata(dev);
1063 struct pd_uinfo *pd_uinfo;
1064 struct ce_pd *pd;
1065 u32 tail;
1066
1067 while (core_dev->dev->pdr_head != core_dev->dev->pdr_tail) {
1068 tail = core_dev->dev->pdr_tail;
1069 pd_uinfo = core_dev->dev->pdr_uinfo +
1070 sizeof(struct pd_uinfo)*tail;
1071 pd = core_dev->dev->pdr + sizeof(struct ce_pd) * tail;
1072 if ((pd_uinfo->state == PD_ENTRY_INUSE) &&
1073 pd->pd_ctl.bf.pe_done &&
1074 !pd->pd_ctl.bf.host_ready) {
1075 pd->pd_ctl.bf.pe_done = 0;
1076 crypto4xx_pd_done(core_dev->dev, tail);
1077 crypto4xx_put_pd_to_pdr(core_dev->dev, tail);
1078 pd_uinfo->state = PD_ENTRY_FREE;
1079 } else {
1080 /* if tail not done, break */
1081 break;
1082 }
1083 }
1084}
1085
1086/**
1087 * Top half of the ISR.
1088 */
1089static irqreturn_t crypto4xx_ce_interrupt_handler(int irq, void *data)
1090{
1091 struct device *dev = (struct device *)data;
1092 struct crypto4xx_core_device *core_dev = dev_get_drvdata(dev);
1093
1094 if (core_dev->dev->ce_base == 0)
1095 return 0;
1096
1097 writel(PPC4XX_INTERRUPT_CLR,
1098 core_dev->dev->ce_base + CRYPTO4XX_INT_CLR);
1099 tasklet_schedule(&core_dev->tasklet);
1100
1101 return IRQ_HANDLED;
1102}
1103
1104/**
1105 * Supported Crypto Algorithms
1106 */
1107struct crypto_alg crypto4xx_alg[] = {
1108 /* Crypto AES modes */
1109 {
1110 .cra_name = "cbc(aes)",
1111 .cra_driver_name = "cbc-aes-ppc4xx",
1112 .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
1113 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
1114 .cra_blocksize = AES_BLOCK_SIZE,
1115 .cra_ctxsize = sizeof(struct crypto4xx_ctx),
1116 .cra_alignmask = 0,
1117 .cra_type = &crypto_ablkcipher_type,
1118 .cra_module = THIS_MODULE,
1119 .cra_u = {
1120 .ablkcipher = {
1121 .min_keysize = AES_MIN_KEY_SIZE,
1122 .max_keysize = AES_MAX_KEY_SIZE,
1123 .ivsize = AES_IV_SIZE,
1124 .setkey = crypto4xx_setkey_aes_cbc,
1125 .encrypt = crypto4xx_encrypt,
1126 .decrypt = crypto4xx_decrypt,
1127 }
1128 }
1129 },
1130 /* Hash SHA1 */
1131 {
1132 .cra_name = "sha1",
1133 .cra_driver_name = "sha1-ppc4xx",
1134 .cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
1135 .cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
1136 .cra_blocksize = SHA1_BLOCK_SIZE,
1137 .cra_ctxsize = sizeof(struct crypto4xx_ctx),
1138 .cra_alignmask = 0,
1139 .cra_type = &crypto_ahash_type,
1140 .cra_init = crypto4xx_sha1_alg_init,
1141 .cra_module = THIS_MODULE,
1142 .cra_u = {
1143 .ahash = {
1144 .digestsize = SHA1_DIGEST_SIZE,
1145 .init = crypto4xx_hash_init,
1146 .update = crypto4xx_hash_update,
1147 .final = crypto4xx_hash_final,
1148 .digest = crypto4xx_hash_digest,
1149 }
1150 }
1151 },
1152};
1153
1154/**
1155 * Module initialization (probe) routine
1156 */
1157static int __init crypto4xx_probe(struct of_device *ofdev,
1158 const struct of_device_id *match)
1159{
1160 int rc;
1161 struct resource res;
1162 struct device *dev = &ofdev->dev;
1163 struct crypto4xx_core_device *core_dev;
1164
1165 rc = of_address_to_resource(ofdev->node, 0, &res);
1166 if (rc)
1167 return -ENODEV;
1168
1169 if (of_find_compatible_node(NULL, NULL, "amcc,ppc460ex-crypto")) {
1170 mtdcri(SDR0, PPC460EX_SDR0_SRST,
1171 mfdcri(SDR0, PPC460EX_SDR0_SRST) | PPC460EX_CE_RESET);
1172 mtdcri(SDR0, PPC460EX_SDR0_SRST,
1173 mfdcri(SDR0, PPC460EX_SDR0_SRST) & ~PPC460EX_CE_RESET);
1174 } else if (of_find_compatible_node(NULL, NULL,
1175 "amcc,ppc405ex-crypto")) {
1176 mtdcri(SDR0, PPC405EX_SDR0_SRST,
1177 mfdcri(SDR0, PPC405EX_SDR0_SRST) | PPC405EX_CE_RESET);
1178 mtdcri(SDR0, PPC405EX_SDR0_SRST,
1179 mfdcri(SDR0, PPC405EX_SDR0_SRST) & ~PPC405EX_CE_RESET);
1180 } else if (of_find_compatible_node(NULL, NULL,
1181 "amcc,ppc460sx-crypto")) {
1182 mtdcri(SDR0, PPC460SX_SDR0_SRST,
1183 mfdcri(SDR0, PPC460SX_SDR0_SRST) | PPC460SX_CE_RESET);
1184 mtdcri(SDR0, PPC460SX_SDR0_SRST,
1185 mfdcri(SDR0, PPC460SX_SDR0_SRST) & ~PPC460SX_CE_RESET);
1186 } else {
1187 printk(KERN_ERR "Crypto Function Not supported!\n");
1188 return -EINVAL;
1189 }
1190
1191 core_dev = kzalloc(sizeof(struct crypto4xx_core_device), GFP_KERNEL);
1192 if (!core_dev)
1193 return -ENOMEM;
1194
1195 dev_set_drvdata(dev, core_dev);
1196 core_dev->ofdev = ofdev;
1197 core_dev->dev = kzalloc(sizeof(struct crypto4xx_device), GFP_KERNEL);
1198 if (!core_dev->dev)
1199 goto err_alloc_dev;
1200
1201 core_dev->dev->core_dev = core_dev;
1202 core_dev->device = dev;
1203 spin_lock_init(&core_dev->lock);
1204 INIT_LIST_HEAD(&core_dev->dev->alg_list);
1205 rc = crypto4xx_build_pdr(core_dev->dev);
1206 if (rc)
1207 goto err_build_pdr;
1208
1209 rc = crypto4xx_build_gdr(core_dev->dev);
1210 if (rc)
1211 goto err_build_gdr;
1212
1213 rc = crypto4xx_build_sdr(core_dev->dev);
1214 if (rc)
1215 goto err_build_sdr;
1216
1217 /* Init tasklet for bottom half processing */
1218 tasklet_init(&core_dev->tasklet, crypto4xx_bh_tasklet_cb,
1219 (unsigned long) dev);
1220
1221 /* Register for Crypto isr, Crypto Engine IRQ */
1222 core_dev->irq = irq_of_parse_and_map(ofdev->node, 0);
1223 rc = request_irq(core_dev->irq, crypto4xx_ce_interrupt_handler, 0,
1224 core_dev->dev->name, dev);
1225 if (rc)
1226 goto err_request_irq;
1227
1228 core_dev->dev->ce_base = of_iomap(ofdev->node, 0);
1229 if (!core_dev->dev->ce_base) {
1230 dev_err(dev, "failed to of_iomap\n");
1231 goto err_iomap;
1232 }
1233
1234 /* need to setup pdr, rdr, gdr and sdr before this */
1235 crypto4xx_hw_init(core_dev->dev);
1236
1237 /* Register security algorithms with Linux CryptoAPI */
1238 rc = crypto4xx_register_alg(core_dev->dev, crypto4xx_alg,
1239 ARRAY_SIZE(crypto4xx_alg));
1240 if (rc)
1241 goto err_start_dev;
1242
1243 return 0;
1244
1245err_start_dev:
1246 iounmap(core_dev->dev->ce_base);
1247err_iomap:
1248 free_irq(core_dev->irq, dev);
1249 irq_dispose_mapping(core_dev->irq);
1250 tasklet_kill(&core_dev->tasklet);
1251err_request_irq:
1252 crypto4xx_destroy_sdr(core_dev->dev);
1253err_build_sdr:
1254 crypto4xx_destroy_gdr(core_dev->dev);
1255err_build_gdr:
1256 crypto4xx_destroy_pdr(core_dev->dev);
1257err_build_pdr:
1258 kfree(core_dev->dev);
1259err_alloc_dev:
1260 kfree(core_dev);
1261
1262 return rc;
1263}
1264
1265static int __exit crypto4xx_remove(struct of_device *ofdev)
1266{
1267 struct device *dev = &ofdev->dev;
1268 struct crypto4xx_core_device *core_dev = dev_get_drvdata(dev);
1269
1270 free_irq(core_dev->irq, dev);
1271 irq_dispose_mapping(core_dev->irq);
1272
1273 tasklet_kill(&core_dev->tasklet);
1274 /* Un-register with Linux CryptoAPI */
1275 crypto4xx_unregister_alg(core_dev->dev);
1276 /* Free all allocated memory */
1277 crypto4xx_stop_all(core_dev);
1278
1279 return 0;
1280}
1281
1282static struct of_device_id crypto4xx_match[] = {
1283 { .compatible = "amcc,ppc4xx-crypto",},
1284 { },
1285};
1286
1287static struct of_platform_driver crypto4xx_driver = {
1288 .name = "crypto4xx",
1289 .match_table = crypto4xx_match,
1290 .probe = crypto4xx_probe,
1291 .remove = crypto4xx_remove,
1292};
1293
1294static int __init crypto4xx_init(void)
1295{
1296 return of_register_platform_driver(&crypto4xx_driver);
1297}
1298
1299static void __exit crypto4xx_exit(void)
1300{
1301 of_unregister_platform_driver(&crypto4xx_driver);
1302}
1303
1304module_init(crypto4xx_init);
1305module_exit(crypto4xx_exit);
1306
1307MODULE_LICENSE("GPL");
1308MODULE_AUTHOR("James Hsiao <jhsiao@amcc.com>");
1309MODULE_DESCRIPTION("Driver for AMCC PPC4xx crypto accelerator");
1310
diff --git a/drivers/crypto/amcc/crypto4xx_core.h b/drivers/crypto/amcc/crypto4xx_core.h
new file mode 100644
index 00000000000..1ef10344936
--- /dev/null
+++ b/drivers/crypto/amcc/crypto4xx_core.h
@@ -0,0 +1,177 @@
1/**
2 * AMCC SoC PPC4xx Crypto Driver
3 *
4 * Copyright (c) 2008 Applied Micro Circuits Corporation.
5 * All rights reserved. James Hsiao <jhsiao@amcc.com>
6 *
7 * This program is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation; either version 2 of the License, or
10 * (at your option) any later version.
11 *
12 * This program is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 * GNU General Public License for more details.
16 *
17 * This is the header file for the AMCC crypto offload Linux device driver
18 * for use with the Linux CryptoAPI.
19
20 */
21
22#ifndef __CRYPTO4XX_CORE_H__
23#define __CRYPTO4XX_CORE_H__
24
25#define PPC460SX_SDR0_SRST 0x201
26#define PPC405EX_SDR0_SRST 0x200
27#define PPC460EX_SDR0_SRST 0x201
28#define PPC460EX_CE_RESET 0x08000000
29#define PPC460SX_CE_RESET 0x20000000
30#define PPC405EX_CE_RESET 0x00000008
31
32#define CRYPTO4XX_CRYPTO_PRIORITY 300
33#define PPC4XX_LAST_PD 63
34#define PPC4XX_NUM_PD 64
35#define PPC4XX_LAST_GD 1023
36#define PPC4XX_NUM_GD 1024
37#define PPC4XX_LAST_SD 63
38#define PPC4XX_NUM_SD 64
39#define PPC4XX_SD_BUFFER_SIZE 2048
40
41#define PD_ENTRY_INUSE 1
42#define PD_ENTRY_FREE 0
43#define ERING_WAS_FULL 0xffffffff
44
45struct crypto4xx_device;
46
47struct pd_uinfo {
48 struct crypto4xx_device *dev;
49 u32 state;
50 u32 using_sd;
51 u32 first_gd; /* first gather descriptor
52 used by this packet */
53 u32 num_gd; /* number of gather descriptors
54 used by this packet */
55 u32 first_sd; /* first scatter descriptor
56 used by this packet */
57 u32 num_sd; /* number of scatter descriptors
58 used by this packet */
59 void *sa_va; /* shadow sa, when using cp from ctx->sa */
60 u32 sa_pa;
61 void *sr_va; /* state record for shadow sa */
62 u32 sr_pa;
63 struct scatterlist *dest_va;
64 struct crypto_async_request *async_req; /* base crypto request
65 for this packet */
66};
67
68struct crypto4xx_device {
69 struct crypto4xx_core_device *core_dev;
70 char *name;
71 u64 ce_phy_address;
72 void __iomem *ce_base;
73
74 void *pdr; /* base address of packet
75 descriptor ring */
76 dma_addr_t pdr_pa; /* physical address used to
77 program ce pdr_base_register */
78 void *gdr; /* gather descriptor ring */
79 dma_addr_t gdr_pa; /* physical address used to
80 program ce gdr_base_register */
81 void *sdr; /* scatter descriptor ring */
82 dma_addr_t sdr_pa; /* physical address used to
83 program ce sdr_base_register */
84 void *scatter_buffer_va;
85 dma_addr_t scatter_buffer_pa;
86 u32 scatter_buffer_size;
87
88 void *shadow_sa_pool; /* pool of memory for sa in pd_uinfo */
89 dma_addr_t shadow_sa_pool_pa;
90 void *shadow_sr_pool; /* pool of memory for sr in pd_uinfo */
91 dma_addr_t shadow_sr_pool_pa;
92 u32 pdr_tail;
93 u32 pdr_head;
94 u32 gdr_tail;
95 u32 gdr_head;
96 u32 sdr_tail;
97 u32 sdr_head;
98 void *pdr_uinfo;
99 struct list_head alg_list; /* List of algorithm supported
100 by this device */
101};
102
103struct crypto4xx_core_device {
104 struct device *device;
105 struct of_device *ofdev;
106 struct crypto4xx_device *dev;
107 u32 int_status;
108 u32 irq;
109 struct tasklet_struct tasklet;
110 spinlock_t lock;
111};
112
113struct crypto4xx_ctx {
114 struct crypto4xx_device *dev;
115 void *sa_in;
116 dma_addr_t sa_in_dma_addr;
117 void *sa_out;
118 dma_addr_t sa_out_dma_addr;
119 void *state_record;
120 dma_addr_t state_record_dma_addr;
121 u32 sa_len;
122 u32 offset_to_sr_ptr; /* offset to state ptr, in dynamic sa */
123 u32 direction;
124 u32 next_hdr;
125 u32 save_iv;
126 u32 pd_ctl_len;
127 u32 pd_ctl;
128 u32 bypass;
129 u32 is_hash;
130 u32 hash_final;
131};
132
133struct crypto4xx_req_ctx {
134 struct crypto4xx_device *dev; /* device to which the
135 operation is sent */
136 void *sa;
137 u32 sa_dma_addr;
138 u16 sa_len;
139};
140
141struct crypto4xx_alg {
142 struct list_head entry;
143 struct crypto_alg alg;
144 struct crypto4xx_device *dev;
145};
146
147#define crypto_alg_to_crypto4xx_alg(x) \
148 container_of(x, struct crypto4xx_alg, alg)
149
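crypto_alg_to_crypto4xx_alg() is a thin container_of() wrapper; a minimal illustration of what it recovers (editorial example, not driver code):

struct crypto4xx_alg example;
struct crypto_alg *inner = &example.alg;            /* embedded member */
struct crypto4xx_alg *outer =
	crypto_alg_to_crypto4xx_alg(inner);         /* steps back to the wrapper */
/* outer == &example */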
150extern int crypto4xx_alloc_sa(struct crypto4xx_ctx *ctx, u32 size);
151extern void crypto4xx_free_sa(struct crypto4xx_ctx *ctx);
152extern u32 crypto4xx_alloc_sa_rctx(struct crypto4xx_ctx *ctx,
153 struct crypto4xx_ctx *rctx);
154extern void crypto4xx_free_sa_rctx(struct crypto4xx_ctx *rctx);
155extern void crypto4xx_free_ctx(struct crypto4xx_ctx *ctx);
156extern u32 crypto4xx_alloc_state_record(struct crypto4xx_ctx *ctx);
157extern u32 get_dynamic_sa_offset_state_ptr_field(struct crypto4xx_ctx *ctx);
158extern u32 get_dynamic_sa_offset_key_field(struct crypto4xx_ctx *ctx);
159extern u32 get_dynamic_sa_iv_size(struct crypto4xx_ctx *ctx);
160extern void crypto4xx_memcpy_le(unsigned int *dst,
161 const unsigned char *buf, int len);
162extern u32 crypto4xx_build_pd(struct crypto_async_request *req,
163 struct crypto4xx_ctx *ctx,
164 struct scatterlist *src,
165 struct scatterlist *dst,
166 unsigned int datalen,
167 void *iv, u32 iv_len);
168extern int crypto4xx_setkey_aes_cbc(struct crypto_ablkcipher *cipher,
169 const u8 *key, unsigned int keylen);
170extern int crypto4xx_encrypt(struct ablkcipher_request *req);
171extern int crypto4xx_decrypt(struct ablkcipher_request *req);
172extern int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm);
173extern int crypto4xx_hash_digest(struct ahash_request *req);
174extern int crypto4xx_hash_final(struct ahash_request *req);
175extern int crypto4xx_hash_update(struct ahash_request *req);
176extern int crypto4xx_hash_init(struct ahash_request *req);
177#endif
diff --git a/drivers/crypto/amcc/crypto4xx_reg_def.h b/drivers/crypto/amcc/crypto4xx_reg_def.h
new file mode 100644
index 00000000000..7d4edb00261
--- /dev/null
+++ b/drivers/crypto/amcc/crypto4xx_reg_def.h
@@ -0,0 +1,284 @@
1/**
2 * AMCC SoC PPC4xx Crypto Driver
3 *
4 * Copyright (c) 2008 Applied Micro Circuits Corporation.
5 * All rights reserved. James Hsiao <jhsiao@amcc.com>
6 *
7 * This program is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation; either version 2 of the License, or
10 * (at your option) any later version.
11 *
12 * This program is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 * GNU General Public License for more details.
16 *
17 * This file defines the register set for the Security Subsystem
18 */
19
20#ifndef __CRYPTO4XX_REG_DEF_H__
21#define __CRYPTO4XX_REG_DEF_H__
22
23/* CRYPTO4XX Register offset */
24#define CRYPTO4XX_DESCRIPTOR 0x00000000
25#define CRYPTO4XX_CTRL_STAT 0x00000000
26#define CRYPTO4XX_SOURCE 0x00000004
27#define CRYPTO4XX_DEST 0x00000008
28#define CRYPTO4XX_SA 0x0000000C
29#define CRYPTO4XX_SA_LENGTH 0x00000010
30#define CRYPTO4XX_LENGTH 0x00000014
31
32#define CRYPTO4XX_PE_DMA_CFG 0x00000040
33#define CRYPTO4XX_PE_DMA_STAT 0x00000044
34#define CRYPTO4XX_PDR_BASE 0x00000048
35#define CRYPTO4XX_RDR_BASE 0x0000004c
36#define CRYPTO4XX_RING_SIZE 0x00000050
37#define CRYPTO4XX_RING_CTRL 0x00000054
38#define CRYPTO4XX_INT_RING_STAT 0x00000058
39#define CRYPTO4XX_EXT_RING_STAT 0x0000005c
40#define CRYPTO4XX_IO_THRESHOLD 0x00000060
41#define CRYPTO4XX_GATH_RING_BASE 0x00000064
42#define CRYPTO4XX_SCAT_RING_BASE 0x00000068
43#define CRYPTO4XX_PART_RING_SIZE 0x0000006c
44#define CRYPTO4XX_PART_RING_CFG 0x00000070
45
46#define CRYPTO4XX_PDR_BASE_UADDR 0x00000080
47#define CRYPTO4XX_RDR_BASE_UADDR 0x00000084
48#define CRYPTO4XX_PKT_SRC_UADDR 0x00000088
49#define CRYPTO4XX_PKT_DEST_UADDR 0x0000008c
50#define CRYPTO4XX_SA_UADDR 0x00000090
51#define CRYPTO4XX_GATH_RING_BASE_UADDR 0x000000A0
52#define CRYPTO4XX_SCAT_RING_BASE_UADDR 0x000000A4
53
54#define CRYPTO4XX_SEQ_RD 0x00000408
55#define CRYPTO4XX_SEQ_MASK_RD 0x0000040C
56
57#define CRYPTO4XX_SA_CMD_0 0x00010600
58#define CRYPTO4XX_SA_CMD_1 0x00010604
59
60#define CRYPTO4XX_STATE_PTR 0x000106dc
61#define CRYPTO4XX_STATE_IV 0x00010700
62#define CRYPTO4XX_STATE_HASH_BYTE_CNT_0 0x00010710
63#define CRYPTO4XX_STATE_HASH_BYTE_CNT_1 0x00010714
64
65#define CRYPTO4XX_STATE_IDIGEST_0 0x00010718
66#define CRYPTO4XX_STATE_IDIGEST_1 0x0001071c
67
68#define CRYPTO4XX_DATA_IN 0x00018000
69#define CRYPTO4XX_DATA_OUT 0x0001c000
70
71#define CRYPTO4XX_INT_UNMASK_STAT 0x000500a0
72#define CRYPTO4XX_INT_MASK_STAT 0x000500a4
73#define CRYPTO4XX_INT_CLR 0x000500a4
74#define CRYPTO4XX_INT_EN 0x000500a8
75
76#define CRYPTO4XX_INT_PKA 0x00000002
77#define CRYPTO4XX_INT_PDR_DONE 0x00008000
78#define CRYPTO4XX_INT_MA_WR_ERR 0x00020000
79#define CRYPTO4XX_INT_MA_RD_ERR 0x00010000
80#define CRYPTO4XX_INT_PE_ERR 0x00000200
81#define CRYPTO4XX_INT_USER_DMA_ERR 0x00000040
82#define CRYPTO4XX_INT_SLAVE_ERR 0x00000010
83#define CRYPTO4XX_INT_MASTER_ERR 0x00000008
84#define CRYPTO4XX_INT_ERROR 0x00030258
85
86#define CRYPTO4XX_INT_CFG 0x000500ac
87#define CRYPTO4XX_INT_DESCR_RD 0x000500b0
88#define CRYPTO4XX_INT_DESCR_CNT 0x000500b4
89#define CRYPTO4XX_INT_TIMEOUT_CNT 0x000500b8
90
91#define CRYPTO4XX_DEVICE_CTRL 0x00060080
92#define CRYPTO4XX_DEVICE_ID 0x00060084
93#define CRYPTO4XX_DEVICE_INFO 0x00060088
94#define CRYPTO4XX_DMA_USER_SRC 0x00060094
95#define CRYPTO4XX_DMA_USER_DEST 0x00060098
96#define CRYPTO4XX_DMA_USER_CMD 0x0006009C
97
98#define CRYPTO4XX_DMA_CFG 0x000600d4
99#define CRYPTO4XX_BYTE_ORDER_CFG 0x000600d8
100#define CRYPTO4XX_ENDIAN_CFG 0x000600d8
101
102#define CRYPTO4XX_PRNG_STAT 0x00070000
103#define CRYPTO4XX_PRNG_CTRL 0x00070004
104#define CRYPTO4XX_PRNG_SEED_L 0x00070008
105#define CRYPTO4XX_PRNG_SEED_H 0x0007000c
106
107#define CRYPTO4XX_PRNG_RES_0 0x00070020
108#define CRYPTO4XX_PRNG_RES_1 0x00070024
109#define CRYPTO4XX_PRNG_RES_2 0x00070028
110#define CRYPTO4XX_PRNG_RES_3 0x0007002C
111
112#define CRYPTO4XX_PRNG_LFSR_L 0x00070030
113#define CRYPTO4XX_PRNG_LFSR_H 0x00070034
114
115/**
116 * Values used to initialize the crypto engine registers and memory bases.
117 */
118#define PPC4XX_PDR_POLL 0x3ff
119#define PPC4XX_OUTPUT_THRESHOLD 2
120#define PPC4XX_INPUT_THRESHOLD 2
121#define PPC4XX_PD_SIZE 6
122#define PPC4XX_CTX_DONE_INT 0x2000
123#define PPC4XX_PD_DONE_INT 0x8000
124#define PPC4XX_BYTE_ORDER 0x22222
125#define PPC4XX_INTERRUPT_CLR 0x3ffff
126#define PPC4XX_PRNG_CTRL_AUTO_EN 0x3
127#define PPC4XX_DC_3DES_EN 1
128#define PPC4XX_INT_DESCR_CNT 4
129#define PPC4XX_INT_TIMEOUT_CNT 0
130#define PPC4XX_INT_CFG 1
131/**
132 * All of the following defines are ad hoc (empirically chosen) values.
133 */
134#define PPC4XX_RING_RETRY 100
135#define PPC4XX_RING_POLL 100
136#define PPC4XX_SDR_SIZE PPC4XX_NUM_SD
137#define PPC4XX_GDR_SIZE PPC4XX_NUM_GD
138
139/**
140 * Generic Security Association (SA) with all possible fields. It is
141 * unlikely to be used except for reference purposes. The structure format
142 * must not be changed, as the hardware expects the fields laid out exactly
143 * as defined. Fields may be removed or reduced, but their ordering cannot change.
144 */
145#define CRYPTO4XX_DMA_CFG_OFFSET 0x40
146union ce_pe_dma_cfg {
147 struct {
148 u32 rsv:7;
149 u32 dir_host:1;
150 u32 rsv1:2;
151 u32 bo_td_en:1;
152 u32 dis_pdr_upd:1;
153 u32 bo_sgpd_en:1;
154 u32 bo_data_en:1;
155 u32 bo_sa_en:1;
156 u32 bo_pd_en:1;
157 u32 rsv2:4;
158 u32 dynamic_sa_en:1;
159 u32 pdr_mode:2;
160 u32 pe_mode:1;
161 u32 rsv3:5;
162 u32 reset_sg:1;
163 u32 reset_pdr:1;
164 u32 reset_pe:1;
165 } bf;
166 u32 w;
167} __attribute__((packed));
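For illustration, the union is built through the bitfield view and written through the word view; a sketch of a packet engine reset using it (the exact reset sequencing is an assumption, see the init code in crypto4xx_core.c):

static void example_reset_pe(void __iomem *ce_base)
{
	union ce_pe_dma_cfg pe_dma_cfg;

	pe_dma_cfg.w = 0;
	pe_dma_cfg.bf.reset_pe = 1;	/* reset packet engine */
	pe_dma_cfg.bf.reset_pdr = 1;	/* reset descriptor ring logic */
	pe_dma_cfg.bf.reset_sg = 1;	/* reset scatter/gather logic */
	writel(pe_dma_cfg.w, ce_base + CRYPTO4XX_PE_DMA_CFG);
}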
168
169#define CRYPTO4XX_PDR_BASE_OFFSET 0x48
170#define CRYPTO4XX_RDR_BASE_OFFSET 0x4c
171#define CRYPTO4XX_RING_SIZE_OFFSET 0x50
172union ce_ring_size {
173 struct {
174 u32 ring_offset:16;
175 u32 rsv:6;
176 u32 ring_size:10;
177 } bf;
178 u32 w;
179} __attribute__((packed));
180
181#define CRYPTO4XX_RING_CONTROL_OFFSET 0x54
182union ce_ring_contol {
183 struct {
184 u32 continuous:1;
185 u32 rsv:5;
186 u32 ring_retry_divisor:10;
187 u32 rsv1:4;
188 u32 ring_poll_divisor:10;
189 } bf;
190 u32 w;
191} __attribute__((packed));
192
193#define CRYPTO4XX_IO_THRESHOLD_OFFSET 0x60
194union ce_io_threshold {
195 struct {
196 u32 rsv:6;
197 u32 output_threshold:10;
198 u32 rsv1:6;
199 u32 input_threshold:10;
200 } bf;
201 u32 w;
202} __attribute__((packed));
203
204#define CRYPTO4XX_GATHER_RING_BASE_OFFSET 0x64
205#define CRYPTO4XX_SCATTER_RING_BASE_OFFSET 0x68
206
207union ce_part_ring_size {
208 struct {
209 u32 sdr_size:16;
210 u32 gdr_size:16;
211 } bf;
212 u32 w;
213} __attribute__((packed));
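Sketch of encoding the partial (gather/scatter) ring sizes with the defines from crypto4xx_core.h; illustrative only:

static inline u32 example_part_ring_size(void)
{
	union ce_part_ring_size ring_size;

	ring_size.w = 0;
	ring_size.bf.sdr_size = PPC4XX_SDR_SIZE;	/* 64 scatter descriptors */
	ring_size.bf.gdr_size = PPC4XX_GDR_SIZE;	/* 1024 gather descriptors */
	return ring_size.w;	/* value for CRYPTO4XX_PART_RING_SIZE */
}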
214
215#define MAX_BURST_SIZE_32 0
216#define MAX_BURST_SIZE_64 1
217#define MAX_BURST_SIZE_128 2
218#define MAX_BURST_SIZE_256 3
219
220/* gather descriptor control length */
221struct gd_ctl_len {
222 u32 len:16;
223 u32 rsv:14;
224 u32 done:1;
225 u32 ready:1;
226} __attribute__((packed));
227
228struct ce_gd {
229 u32 ptr;
230 struct gd_ctl_len ctl_len;
231} __attribute__((packed));
232
233struct sd_ctl {
234 u32 ctl:30;
235 u32 done:1;
236 u32 rdy:1;
237} __attribute__((packed));
238
239struct ce_sd {
240 u32 ptr;
241 struct sd_ctl ctl;
242} __attribute__((packed));
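A hedged sketch of filling a gather descriptor for one DMA-mapped fragment; the done/ready handshake is inferred from the bit names, and scatter descriptors follow the same pattern through sd_ctl:

static void example_fill_gd(struct ce_gd *gd, u32 dma_addr, u16 len)
{
	gd->ptr = dma_addr;		/* bus address of the fragment */
	gd->ctl_len.len = len;
	gd->ctl_len.done = 0;
	gd->ctl_len.ready = 1;		/* hand the descriptor to the engine */
}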
243
244#define PD_PAD_CTL_32 0x10
245#define PD_PAD_CTL_64 0x20
246#define PD_PAD_CTL_128 0x40
247#define PD_PAD_CTL_256 0x80
248union ce_pd_ctl {
249 struct {
250 u32 pd_pad_ctl:8;
251 u32 status:8;
252 u32 next_hdr:8;
253 u32 rsv:2;
254 u32 cached_sa:1;
255 u32 hash_final:1;
256 u32 init_arc4:1;
257 u32 rsv1:1;
258 u32 pe_done:1;
259 u32 host_ready:1;
260 } bf;
261 u32 w;
262} __attribute__((packed));
263
264union ce_pd_ctl_len {
265 struct {
266 u32 bypass:8;
267 u32 pe_done:1;
268 u32 host_ready:1;
269 u32 rsv:2;
270 u32 pkt_len:20;
271 } bf;
272 u32 w;
273} __attribute__((packed));
274
275struct ce_pd {
276 union ce_pd_ctl pd_ctl;
277 u32 src;
278 u32 dest;
279 u32 sa; /* get from ctx->sa_dma_addr */
280 u32 sa_len; /* only if dynamic sa is used */
281 union ce_pd_ctl_len pd_ctl_len;
282
283} __attribute__((packed));
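Illustrative sketch of populating one packet descriptor; which fields the engine consumes, and the ordering of the ready bits, are assumptions here, with crypto4xx_build_pd() in crypto4xx_core.c being authoritative:

static void example_fill_pd(struct ce_pd *pd, u32 src, u32 dest,
			    u32 sa_dma_addr, u32 sa_len,
			    u32 pkt_len, u32 bypass)
{
	pd->src = src;
	pd->dest = dest;
	pd->sa = sa_dma_addr;			/* from ctx->sa_dma_addr */
	pd->sa_len = sa_len;
	pd->pd_ctl_len.w = 0;
	pd->pd_ctl_len.bf.bypass = bypass;
	pd->pd_ctl_len.bf.pkt_len = pkt_len;
	pd->pd_ctl.w = 0;
	pd->pd_ctl.bf.host_ready = 1;		/* release to the engine last */
}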
284#endif
diff --git a/drivers/crypto/amcc/crypto4xx_sa.c b/drivers/crypto/amcc/crypto4xx_sa.c
new file mode 100644
index 00000000000..466fd94cd4a
--- /dev/null
+++ b/drivers/crypto/amcc/crypto4xx_sa.c
@@ -0,0 +1,108 @@
1/**
2 * AMCC SoC PPC4xx Crypto Driver
3 *
4 * Copyright (c) 2008 Applied Micro Circuits Corporation.
5 * All rights reserved. James Hsiao <jhsiao@amcc.com>
6 *
7 * This program is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation; either version 2 of the License, or
10 * (at your option) any later version.
11 *
12 * This program is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 * GNU General Public License for more details.
16 *
17 * @file crypto4xx_sa.c
18 *
19 * This file implements the security association
20 * (SA) context format.
21 */
22#include <linux/kernel.h>
23#include <linux/module.h>
24#include <linux/moduleparam.h>
25#include <linux/mod_devicetable.h>
26#include <linux/interrupt.h>
27#include <linux/spinlock_types.h>
28#include <linux/highmem.h>
29#include <linux/scatterlist.h>
30#include <linux/crypto.h>
31#include <crypto/algapi.h>
32#include <crypto/des.h>
33#include "crypto4xx_reg_def.h"
34#include "crypto4xx_sa.h"
35#include "crypto4xx_core.h"
36
37u32 get_dynamic_sa_offset_iv_field(struct crypto4xx_ctx *ctx)
38{
39 u32 offset;
40 union dynamic_sa_contents cts;
41
42 if (ctx->direction == DIR_INBOUND)
43 cts.w = ((struct dynamic_sa_ctl *)(ctx->sa_in))->sa_contents;
44 else
45 cts.w = ((struct dynamic_sa_ctl *)(ctx->sa_out))->sa_contents;
46 offset = cts.bf.key_size
47 + cts.bf.inner_size
48 + cts.bf.outer_size
49 + cts.bf.spi
50 + cts.bf.seq_num0
51 + cts.bf.seq_num1
52 + cts.bf.seq_num_mask0
53 + cts.bf.seq_num_mask1
54 + cts.bf.seq_num_mask2
55 + cts.bf.seq_num_mask3;
56
57 return sizeof(struct dynamic_sa_ctl) + offset * 4;
58}
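/*
 * Worked example: for AES128 (SA_AES128_CONTENTS = 0x3e000042 in
 * crypto4xx_sa.h) key_size is 4 and the spi/seq_num* fields are all 0,
 * so the sum above is 4 words and the IV offset is
 * sizeof(struct dynamic_sa_ctl) + 4 * 4 = 12 + 16 = 28 bytes, matching
 * struct dynamic_sa_aes128, where iv[] follows ctrl and key[4].
 */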
59
60u32 get_dynamic_sa_offset_state_ptr_field(struct crypto4xx_ctx *ctx)
61{
62 u32 offset;
63 union dynamic_sa_contents cts;
64
65 if (ctx->direction == DIR_INBOUND)
66 cts.w = ((struct dynamic_sa_ctl *) ctx->sa_in)->sa_contents;
67 else
68 cts.w = ((struct dynamic_sa_ctl *) ctx->sa_out)->sa_contents;
69 offset = cts.bf.key_size
70 + cts.bf.inner_size
71 + cts.bf.outer_size
72 + cts.bf.spi
73 + cts.bf.seq_num0
74 + cts.bf.seq_num1
75 + cts.bf.seq_num_mask0
76 + cts.bf.seq_num_mask1
77 + cts.bf.seq_num_mask2
78 + cts.bf.seq_num_mask3
79 + cts.bf.iv0
80 + cts.bf.iv1
81 + cts.bf.iv2
82 + cts.bf.iv3;
83
84 return sizeof(struct dynamic_sa_ctl) + offset * 4;
85}
86
87u32 get_dynamic_sa_iv_size(struct crypto4xx_ctx *ctx)
88{
89 union dynamic_sa_contents cts;
90
91 if (ctx->direction == DIR_INBOUND)
92 cts.w = ((struct dynamic_sa_ctl *) ctx->sa_in)->sa_contents;
93 else
94 cts.w = ((struct dynamic_sa_ctl *) ctx->sa_out)->sa_contents;
95 return (cts.bf.iv0 + cts.bf.iv1 + cts.bf.iv2 + cts.bf.iv3) * 4;
96}
97
98u32 get_dynamic_sa_offset_key_field(struct crypto4xx_ctx *ctx)
99{
100 union dynamic_sa_contents cts;
101
102 if (ctx->direction == DIR_INBOUND)
103 cts.w = ((struct dynamic_sa_ctl *) ctx->sa_in)->sa_contents;
104 else
105 cts.w = ((struct dynamic_sa_ctl *) ctx->sa_out)->sa_contents;
106
107 return sizeof(struct dynamic_sa_ctl);
108}
diff --git a/drivers/crypto/amcc/crypto4xx_sa.h b/drivers/crypto/amcc/crypto4xx_sa.h
new file mode 100644
index 00000000000..4b83ed7e557
--- /dev/null
+++ b/drivers/crypto/amcc/crypto4xx_sa.h
@@ -0,0 +1,243 @@
1/**
2 * AMCC SoC PPC4xx Crypto Driver
3 *
4 * Copyright (c) 2008 Applied Micro Circuits Corporation.
5 * All rights reserved. James Hsiao <jhsiao@amcc.com>
6 *
7 * This program is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation; either version 2 of the License, or
10 * (at your option) any later version.
11 *
12 * This program is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 * GNU General Public License for more details.
16 *
17 * This file defines the security association
18 * (SA) context format.
19 */
20
21#ifndef __CRYPTO4XX_SA_H__
22#define __CRYPTO4XX_SA_H__
23
24#define AES_IV_SIZE 16
25
26/**
27 * Contents of Dynamic Security Association (SA) with all possible fields
28 */
29union dynamic_sa_contents {
30 struct {
31 u32 arc4_state_ptr:1;
32 u32 arc4_ij_ptr:1;
33 u32 state_ptr:1;
34 u32 iv3:1;
35 u32 iv2:1;
36 u32 iv1:1;
37 u32 iv0:1;
38 u32 seq_num_mask3:1;
39 u32 seq_num_mask2:1;
40 u32 seq_num_mask1:1;
41 u32 seq_num_mask0:1;
42 u32 seq_num1:1;
43 u32 seq_num0:1;
44 u32 spi:1;
45 u32 outer_size:5;
46 u32 inner_size:5;
47 u32 key_size:4;
48 u32 cmd_size:4;
49 } bf;
50 u32 w;
51} __attribute__((packed));
52
53#define DIR_OUTBOUND 0
54#define DIR_INBOUND 1
55#define SA_OP_GROUP_BASIC 0
56#define SA_OPCODE_ENCRYPT 0
57#define SA_OPCODE_DECRYPT 0
58#define SA_OPCODE_HASH 3
59#define SA_CIPHER_ALG_DES 0
60#define SA_CIPHER_ALG_3DES 1
61#define SA_CIPHER_ALG_ARC4 2
62#define SA_CIPHER_ALG_AES 3
63#define SA_CIPHER_ALG_KASUMI 4
64#define SA_CIPHER_ALG_NULL 15
65
66#define SA_HASH_ALG_MD5 0
67#define SA_HASH_ALG_SHA1 1
68#define SA_HASH_ALG_NULL 15
69#define SA_HASH_ALG_SHA1_DIGEST_SIZE 20
70
71#define SA_LOAD_HASH_FROM_SA 0
72#define SA_LOAD_HASH_FROM_STATE 2
73#define SA_NOT_LOAD_HASH 3
74#define SA_LOAD_IV_FROM_SA 0
75#define SA_LOAD_IV_FROM_INPUT 1
76#define SA_LOAD_IV_FROM_STATE 2
77#define SA_LOAD_IV_GEN_IV 3
78
79#define SA_PAD_TYPE_CONSTANT 2
80#define SA_PAD_TYPE_ZERO 3
81#define SA_PAD_TYPE_TLS 5
82#define SA_PAD_TYPE_DTLS 5
83#define SA_NOT_SAVE_HASH 0
84#define SA_SAVE_HASH 1
85#define SA_NOT_SAVE_IV 0
86#define SA_SAVE_IV 1
87#define SA_HEADER_PROC 1
88#define SA_NO_HEADER_PROC 0
89
90union sa_command_0 {
91 struct {
92 u32 scatter:1;
93 u32 gather:1;
94 u32 save_hash_state:1;
95 u32 save_iv:1;
96 u32 load_hash_state:2;
97 u32 load_iv:2;
98 u32 digest_len:4;
99 u32 hdr_proc:1;
100 u32 extend_pad:1;
101 u32 stream_cipher_pad:1;
102 u32 rsv:1;
103 u32 hash_alg:4;
104 u32 cipher_alg:4;
105 u32 pad_type:2;
106 u32 op_group:2;
107 u32 dir:1;
108 u32 opcode:3;
109 } bf;
110 u32 w;
111} __attribute__((packed));
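For illustration, a sketch composing the sa_command_0 word for a basic SHA1 hash from the selectors above; the exact field choices are assumptions (crypto4xx_alg.c sets the real values):

static inline u32 example_sha1_sa_command_0(void)
{
	union sa_command_0 cmd;

	cmd.w = 0;
	cmd.bf.opcode = SA_OPCODE_HASH;
	cmd.bf.op_group = SA_OP_GROUP_BASIC;
	cmd.bf.dir = DIR_INBOUND;
	cmd.bf.hash_alg = SA_HASH_ALG_SHA1;
	cmd.bf.cipher_alg = SA_CIPHER_ALG_NULL;
	cmd.bf.load_hash_state = SA_LOAD_HASH_FROM_SA;
	cmd.bf.save_hash_state = SA_SAVE_HASH;
	return cmd.w;
}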
112
113#define CRYPTO_MODE_ECB 0
114#define CRYPTO_MODE_CBC 1
115
116#define CRYPTO_FEEDBACK_MODE_NO_FB 0
117#define CRYPTO_FEEDBACK_MODE_64BIT_OFB 0
118#define CRYPTO_FEEDBACK_MODE_8BIT_CFB 1
119#define CRYPTO_FEEDBACK_MODE_1BIT_CFB 2
120#define CRYPTO_FEEDBACK_MODE_128BIT_CFB 3
121
122#define SA_AES_KEY_LEN_128 2
123#define SA_AES_KEY_LEN_192 3
124#define SA_AES_KEY_LEN_256 4
125
126#define SA_REV2 1
127/**
128 * The following define the bits of sa_command_1.
129 * In basic hash mode this bit selects simple hash or HMAC.
130 * In IPsec mode, this bit controls mutable-bit processing (muting).
131 */
132#define SA_HASH_MODE_HASH 0
133#define SA_HASH_MODE_HMAC 1
134#define SA_MC_ENABLE 0
135#define SA_MC_DISABLE 1
136#define SA_NOT_COPY_HDR 0
137#define SA_COPY_HDR 1
138#define SA_NOT_COPY_PAD 0
139#define SA_COPY_PAD 1
140#define SA_NOT_COPY_PAYLOAD 0
141#define SA_COPY_PAYLOAD 1
142#define SA_EXTENDED_SN_OFF 0
143#define SA_EXTENDED_SN_ON 1
144#define SA_SEQ_MASK_OFF 0
145#define SA_SEQ_MASK_ON 1
146
147union sa_command_1 {
148 struct {
149 u32 crypto_mode31:1;
150 u32 save_arc4_state:1;
151 u32 arc4_stateful:1;
152 u32 key_len:5;
153 u32 hash_crypto_offset:8;
154 u32 sa_rev:2;
155 u32 byte_offset:1;
156 u32 hmac_muting:1;
157 u32 feedback_mode:2;
158 u32 crypto_mode9_8:2;
159 u32 extended_seq_num:1;
160 u32 seq_num_mask:1;
161 u32 mutable_bit_proc:1;
162 u32 ip_version:1;
163 u32 copy_pad:1;
164 u32 copy_payload:1;
165 u32 copy_hdr:1;
166 u32 rsv1:1;
167 } bf;
168 u32 w;
169} __attribute__((packed));
170
171struct dynamic_sa_ctl {
172 u32 sa_contents;
173 union sa_command_0 sa_command_0;
174 union sa_command_1 sa_command_1;
175} __attribute__((packed));
176
177/**
178 * State Record for Security Association (SA)
179 */
180struct sa_state_record {
181 u32 save_iv[4];
182 u32 save_hash_byte_cnt[2];
183 u32 save_digest[16];
184} __attribute__((packed));
185
186/**
187 * Security Association (SA) for AES128
188 *
189 */
190struct dynamic_sa_aes128 {
191 struct dynamic_sa_ctl ctrl;
192 u32 key[4];
193	u32 iv[4]; /* for CBC, OFB, and CFB modes */
194 u32 state_ptr;
195 u32 reserved;
196} __attribute__((packed));
197
198#define SA_AES128_LEN (sizeof(struct dynamic_sa_aes128)/4)
199#define SA_AES128_CONTENTS 0x3e000042
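/*
 * Decoding 0x3e000042 against union dynamic_sa_contents (big-endian
 * bitfields, first field in the most significant bits, as on PPC4xx):
 * state_ptr = 1, iv3..iv0 = 1 (four IV words), key_size = 4 (four key
 * words, i.e. a 128-bit key) and cmd_size = 2; all other fields are 0.
 * SA_AES192_CONTENTS and SA_AES256_CONTENTS below differ only in
 * key_size (6 and 8 words).
 */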
200
201/*
202 * Security Association (SA) for AES192
203 */
204struct dynamic_sa_aes192 {
205 struct dynamic_sa_ctl ctrl;
206 u32 key[6];
207	u32 iv[4]; /* for CBC, OFB, and CFB modes */
208 u32 state_ptr;
209 u32 reserved;
210} __attribute__((packed));
211
212#define SA_AES192_LEN (sizeof(struct dynamic_sa_aes192)/4)
213#define SA_AES192_CONTENTS 0x3e000062
214
215/**
216 * Security Association (SA) for AES256
217 */
218struct dynamic_sa_aes256 {
219 struct dynamic_sa_ctl ctrl;
220 u32 key[8];
221	u32 iv[4]; /* for CBC, OFB, and CFB modes */
222 u32 state_ptr;
223 u32 reserved;
224} __attribute__((packed));
225
226#define SA_AES256_LEN (sizeof(struct dynamic_sa_aes256)/4)
227#define SA_AES256_CONTENTS 0x3e000082
228#define SA_AES_CONTENTS 0x3e000002
229
230/**
231 * Security Association (SA) for HASH160: HMAC-SHA1
232 */
233struct dynamic_sa_hash160 {
234 struct dynamic_sa_ctl ctrl;
235 u32 inner_digest[5];
236 u32 outer_digest[5];
237 u32 state_ptr;
238 u32 reserved;
239} __attribute__((packed));
240#define SA_HASH160_LEN (sizeof(struct dynamic_sa_hash160)/4)
241#define SA_HASH160_CONTENTS 0x2000a502
242
243#endif