Diffstat (limited to 'arch/sparc/crypto/camellia_glue.c')
-rw-r--r--	arch/sparc/crypto/camellia_glue.c	318
1 file changed, 318 insertions(+), 0 deletions(-)
diff --git a/arch/sparc/crypto/camellia_glue.c b/arch/sparc/crypto/camellia_glue.c
new file mode 100644
index 00000000000..c258cc550a6
--- /dev/null
+++ b/arch/sparc/crypto/camellia_glue.c
@@ -0,0 +1,318 @@
/* Glue code for CAMELLIA encryption optimized for sparc64 crypto opcodes.
 *
 * Copyright (C) 2012 David S. Miller <davem@davemloft.net>
 */

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#define CAMELLIA_MIN_KEY_SIZE	 16
#define CAMELLIA_MAX_KEY_SIZE	 32
#define CAMELLIA_BLOCK_SIZE	 16
#define CAMELLIA_TABLE_BYTE_LEN	272

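/* camellia_sparc64_key_expand() produces the encryption and decryption
 * key schedules in one pass, so both are cached in the tfm context and
 * selected per operation.
 */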
struct camellia_sparc64_ctx {
	u64 encrypt_key[CAMELLIA_TABLE_BYTE_LEN / sizeof(u64)];
	u64 decrypt_key[CAMELLIA_TABLE_BYTE_LEN / sizeof(u64)];
	int key_len;
};

extern void camellia_sparc64_key_expand(const u32 *in_key, u64 *encrypt_key,
					unsigned int key_len, u64 *decrypt_key);

static int camellia_set_key(struct crypto_tfm *tfm, const u8 *_in_key,
			    unsigned int key_len)
{
	struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
	const u32 *in_key = (const u32 *) _in_key;
	u32 *flags = &tfm->crt_flags;

	if (key_len != 16 && key_len != 24 && key_len != 32) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	ctx->key_len = key_len;

	camellia_sparc64_key_expand(in_key, &ctx->encrypt_key[0],
				    key_len, &ctx->decrypt_key[0]);
	return 0;
}

extern void camellia_sparc64_crypt(const u64 *key, const u32 *input,
				   u32 *output, unsigned int key_len);

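/* Single-block operations share one assembly routine; encryption and
 * decryption differ only in which key schedule is passed.
 */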
static void camellia_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);

	camellia_sparc64_crypt(&ctx->encrypt_key[0],
			       (const u32 *) src,
			       (u32 *) dst, ctx->key_len);
}

static void camellia_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);

	camellia_sparc64_crypt(&ctx->decrypt_key[0],
			       (const u32 *) src,
			       (u32 *) dst, ctx->key_len);
}

extern void camellia_sparc64_load_keys(const u64 *key, unsigned int key_len);

typedef void ecb_crypt_op(const u64 *input, u64 *output, unsigned int len,
			  const u64 *key);

extern ecb_crypt_op camellia_sparc64_ecb_crypt_3_grand_rounds;
extern ecb_crypt_op camellia_sparc64_ecb_crypt_4_grand_rounds;

#define CAMELLIA_BLOCK_MASK	(~(CAMELLIA_BLOCK_SIZE - 1))

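/* Bulk path. camellia_sparc64_load_keys() pulls the expanded key schedule
 * into the FPU register file once per request; each op call then encrypts
 * or decrypts a whole walk chunk out of those registers. fprs_write(0)
 * clears FPRS afterwards so the FPU state used here is not left live.
 * Any tail of walk.nbytes below a full block is returned to
 * blkcipher_walk_done() unprocessed.
 */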
static int __ecb_crypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes, bool encrypt)
{
	struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	ecb_crypt_op *op;
	const u64 *key;
	int err;

	op = camellia_sparc64_ecb_crypt_3_grand_rounds;
	if (ctx->key_len != 16)
		op = camellia_sparc64_ecb_crypt_4_grand_rounds;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	if (encrypt)
		key = &ctx->encrypt_key[0];
	else
		key = &ctx->decrypt_key[0];
	camellia_sparc64_load_keys(key, ctx->key_len);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK;

		if (likely(block_len)) {
			const u64 *src64;
			u64 *dst64;

			src64 = (const u64 *)walk.src.virt.addr;
			dst64 = (u64 *) walk.dst.virt.addr;
			op(src64, dst64, block_len, key);
		}
		nbytes &= CAMELLIA_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}

static int ecb_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	return __ecb_crypt(desc, dst, src, nbytes, true);
}

static int ecb_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	return __ecb_crypt(desc, dst, src, nbytes, false);
}

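/* The CBC assembly takes the IV as an in/out argument and updates it in
 * place through walk.iv, so chaining stays correct across the multiple
 * chunks a blkcipher walk may produce for one request.
 */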
typedef void cbc_crypt_op(const u64 *input, u64 *output, unsigned int len,
			  const u64 *key, u64 *iv);

extern cbc_crypt_op camellia_sparc64_cbc_encrypt_3_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_encrypt_4_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_decrypt_3_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_decrypt_4_grand_rounds;

static int cbc_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	cbc_crypt_op *op;
	const u64 *key;
	int err;

	op = camellia_sparc64_cbc_encrypt_3_grand_rounds;
	if (ctx->key_len != 16)
		op = camellia_sparc64_cbc_encrypt_4_grand_rounds;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	key = &ctx->encrypt_key[0];
	camellia_sparc64_load_keys(key, ctx->key_len);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK;

		if (likely(block_len)) {
			const u64 *src64;
			u64 *dst64;

			src64 = (const u64 *)walk.src.virt.addr;
			dst64 = (u64 *) walk.dst.virt.addr;
			op(src64, dst64, block_len, key,
			   (u64 *) walk.iv);
		}
		nbytes &= CAMELLIA_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	cbc_crypt_op *op;
	const u64 *key;
	int err;

	op = camellia_sparc64_cbc_decrypt_3_grand_rounds;
	if (ctx->key_len != 16)
		op = camellia_sparc64_cbc_decrypt_4_grand_rounds;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	key = &ctx->decrypt_key[0];
	camellia_sparc64_load_keys(key, ctx->key_len);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK;

		if (likely(block_len)) {
			const u64 *src64;
			u64 *dst64;

			src64 = (const u64 *)walk.src.virt.addr;
			dst64 = (u64 *) walk.dst.virt.addr;
			op(src64, dst64, block_len, key,
			   (u64 *) walk.iv);
		}
		nbytes &= CAMELLIA_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}

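/* Priority 150 is intended to win over the generic C camellia
 * implementation when both are registered. Note the different alignmasks:
 * the single-block cipher only makes u32 accesses (alignmask 3), while
 * the ecb/cbc paths load and store u64s (alignmask 7).
 */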
static struct crypto_alg algs[] = { {
	.cra_name		= "camellia",
	.cra_driver_name	= "camellia-sparc64",
	.cra_priority		= 150,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_sparc64_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.cia_max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.cia_setkey		= camellia_set_key,
			.cia_encrypt		= camellia_encrypt,
			.cia_decrypt		= camellia_decrypt
		}
	}
}, {
	.cra_name		= "ecb(camellia)",
	.cra_driver_name	= "ecb-camellia-sparc64",
	.cra_priority		= 150,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_sparc64_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.setkey		= camellia_set_key,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(camellia)",
	.cra_driver_name	= "cbc-camellia-sparc64",
	.cra_priority		= 150,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_sparc64_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.setkey		= camellia_set_key,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}
};

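/* %asr26 is the SPARC crypto feature register (CFR); it is only read
 * after the ELF hwcap confirms the crypto unit exists on this cpu.
 */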
static bool __init sparc64_has_camellia_opcode(void)
{
	unsigned long cfr;

	if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
		return false;

	__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
	if (!(cfr & CFR_CAMELLIA))
		return false;

	return true;
}

static int __init camellia_sparc64_mod_init(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(algs); i++)
		INIT_LIST_HEAD(&algs[i].cra_list);

	if (sparc64_has_camellia_opcode()) {
		pr_info("Using sparc64 camellia opcodes optimized CAMELLIA implementation\n");
		return crypto_register_algs(algs, ARRAY_SIZE(algs));
	}
	pr_info("sparc64 camellia opcodes not available.\n");
	return -ENODEV;
}

static void __exit camellia_sparc64_mod_fini(void)
{
	crypto_unregister_algs(algs, ARRAY_SIZE(algs));
}

module_init(camellia_sparc64_mod_init);
module_exit(camellia_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, sparc64 camellia opcode accelerated");

MODULE_ALIAS("camellia");
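
/* Illustrative usage sketch (not part of this driver): a kernel caller of
 * this era would reach the "cbc(camellia)" transform through the blkcipher
 * API roughly as follows; key, iv, buf and buf_len are assumed to come
 * from the caller, and buf_len must be a multiple of CAMELLIA_BLOCK_SIZE:
 *
 *	struct crypto_blkcipher *tfm;
 *	struct blkcipher_desc desc;
 *	struct scatterlist sg;
 *	int err;
 *
 *	tfm = crypto_alloc_blkcipher("cbc(camellia)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	desc.tfm = tfm;
 *	desc.flags = 0;
 *	err = crypto_blkcipher_setkey(tfm, key, 16);
 *	crypto_blkcipher_set_iv(tfm, iv, CAMELLIA_BLOCK_SIZE);
 *	sg_init_one(&sg, buf, buf_len);
 *	err = crypto_blkcipher_encrypt(&desc, &sg, &sg, buf_len);
 *	crypto_free_blkcipher(tfm);
 */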