-rw-r--r--  Documentation/crypto/api-intro.txt | 41
-rw-r--r--  arch/s390/crypto/aes_s390.c | 227
-rw-r--r--  arch/x86/crypto/Makefile | 12
-rw-r--r--  arch/x86/crypto/aes-i586-asm_32.S | 89
-rw-r--r--  arch/x86/crypto/aes-x86_64-asm_64.S | 68
-rw-r--r--  arch/x86/crypto/aes_32.c | 515
-rw-r--r--  arch/x86/crypto/aes_64.c | 336
-rw-r--r--  arch/x86/crypto/aes_glue.c | 57
-rw-r--r--  arch/x86/crypto/salsa20-i586-asm_32.S | 1114
-rw-r--r--  arch/x86/crypto/salsa20-x86_64-asm_64.S | 920
-rw-r--r--  arch/x86/crypto/salsa20_glue.c | 129
-rw-r--r--  arch/x86/crypto/twofish_64.c | 97
-rw-r--r--  arch/x86/crypto/twofish_glue.c (renamed from arch/x86/crypto/twofish_32.c) | 8
-rw-r--r--  crypto/Kconfig | 97
-rw-r--r--  crypto/Makefile | 14
-rw-r--r--  crypto/ablkcipher.c | 241
-rw-r--r--  crypto/aead.c | 400
-rw-r--r--  crypto/aes_generic.c | 468
-rw-r--r--  crypto/algapi.c | 65
-rw-r--r--  crypto/api.c | 19
-rw-r--r--  crypto/authenc.c | 334
-rw-r--r--  crypto/blkcipher.c | 202
-rw-r--r--  crypto/camellia.c | 1781
-rw-r--r--  crypto/cast6.c | 6
-rw-r--r--  crypto/cbc.c | 109
-rw-r--r--  crypto/ccm.c | 889
-rw-r--r--  crypto/chainiv.c | 331
-rw-r--r--  crypto/cryptd.c | 6
-rw-r--r--  crypto/crypto_null.c | 70
-rw-r--r--  crypto/ctr.c | 422
-rw-r--r--  crypto/des_generic.c | 17
-rw-r--r--  crypto/digest.c | 4
-rw-r--r--  crypto/eseqiv.c | 264
-rw-r--r--  crypto/gcm.c | 823
-rw-r--r--  crypto/hmac.c | 3
-rw-r--r--  crypto/internal.h | 31
-rw-r--r--  crypto/lzo.c | 106
-rw-r--r--  crypto/pcbc.c | 105
-rw-r--r--  crypto/salsa20_generic.c | 255
-rw-r--r--  crypto/scatterwalk.c | 10
-rw-r--r--  crypto/seqiv.c | 345
-rw-r--r--  crypto/sha256_generic.c | 72
-rw-r--r--  crypto/tcrypt.c | 449
-rw-r--r--  crypto/tcrypt.h | 3415
-rw-r--r--  crypto/twofish_common.c | 96
-rw-r--r--  crypto/xcbc.c | 10
-rw-r--r--  drivers/char/hw_random/amd-rng.c | 12
-rw-r--r--  drivers/char/hw_random/core.c | 24
-rw-r--r--  drivers/char/hw_random/geode-rng.c | 12
-rw-r--r--  drivers/char/hw_random/intel-rng.c | 15
-rw-r--r--  drivers/char/hw_random/omap-rng.c | 13
-rw-r--r--  drivers/char/hw_random/pasemi-rng.c | 16
-rw-r--r--  drivers/char/hw_random/via-rng.c | 19
-rw-r--r--  drivers/crypto/Kconfig | 11
-rw-r--r--  drivers/crypto/Makefile | 1
-rw-r--r--  drivers/crypto/geode-aes.c | 298
-rw-r--r--  drivers/crypto/geode-aes.h | 44
-rw-r--r--  drivers/crypto/hifn_795x.c | 2838
-rw-r--r--  drivers/crypto/padlock-aes.c | 24
-rw-r--r--  include/crypto/aead.h | 105
-rw-r--r--  include/crypto/aes.h | 31
-rw-r--r--  include/crypto/algapi.h | 31
-rw-r--r--  include/crypto/authenc.h | 27
-rw-r--r--  include/crypto/ctr.h | 20
-rw-r--r--  include/crypto/des.h | 19
-rw-r--r--  include/crypto/internal/aead.h | 80
-rw-r--r--  include/crypto/internal/skcipher.h | 110
-rw-r--r--  include/crypto/scatterwalk.h (renamed from crypto/scatterwalk.h) | 45
-rw-r--r--  include/crypto/sha.h | 12
-rw-r--r--  include/crypto/skcipher.h | 110
-rw-r--r--  include/linux/crypto.h | 103
-rw-r--r--  include/linux/hw_random.h | 2
72 files changed, 15840 insertions, 3254 deletions
diff --git a/Documentation/crypto/api-intro.txt b/Documentation/crypto/api-intro.txt
index a2ac6d294793..8b49302712a8 100644
--- a/Documentation/crypto/api-intro.txt
+++ b/Documentation/crypto/api-intro.txt
@@ -33,9 +33,16 @@ The idea is to make the user interface and algorithm registration API
 very simple, while hiding the core logic from both. Many good ideas
 from existing APIs such as Cryptoapi and Nettle have been adapted for this.
 
-The API currently supports three types of transforms: Ciphers, Digests and
-Compressors. The compression algorithms especially seem to be performing
-very well so far.
+The API currently supports five main types of transforms: AEAD (Authenticated
+Encryption with Associated Data), Block Ciphers, Ciphers, Compressors and
+Hashes.
+
+Please note that Block Ciphers is somewhat of a misnomer. It is in fact
+meant to support all ciphers including stream ciphers. The difference
+between Block Ciphers and Ciphers is that the latter operates on exactly
+one block while the former can operate on an arbitrary amount of data,
+subject to block size requirements (i.e., non-stream ciphers can only
+process multiples of blocks).
 
 Support for hardware crypto devices via an asynchronous interface is
 under development.
@@ -69,29 +76,12 @@ Here's an example of how to use the API:
 Many real examples are available in the regression test module (tcrypt.c).
 
 
-CONFIGURATION NOTES
-
-As Triple DES is part of the DES module, for those using modular builds,
-add the following line to /etc/modprobe.conf:
-
-	alias des3_ede des
-
-The Null algorithms reside in the crypto_null module, so these lines
-should also be added:
-
-	alias cipher_null crypto_null
-	alias digest_null crypto_null
-	alias compress_null crypto_null
-
-The SHA384 algorithm shares code within the SHA512 module, so you'll
-also need:
-	alias sha384 sha512
-
-
 DEVELOPER NOTES
 
 Transforms may only be allocated in user context, and cryptographic
-methods may only be called from softirq and user contexts.
+methods may only be called from softirq and user contexts. For
+transforms with a setkey method it too should only be called from
+user context.
 
 When using the API for ciphers, performance will be optimal if each
 scatterlist contains data which is a multiple of the cipher's block
@@ -130,8 +120,9 @@ might already be working on.
 BUGS
 
 Send bug reports to:
-Herbert Xu <herbert@gondor.apana.org.au>
-Cc: David S. Miller <davem@redhat.com>
+linux-crypto@vger.kernel.org
+Cc: Herbert Xu <herbert@gondor.apana.org.au>,
+    David S. Miller <davem@redhat.com>
 
 
 FURTHER INFORMATION
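
The documentation change above introduces the Block Cipher transform type and
the rule that allocation and setkey must happen in user context. As a rough
illustration (not part of this patch; example_cbc_encrypt() and its parameters
are made up, while the crypto_blkcipher_* calls are the synchronous block
cipher interface of this kernel generation), a kernel-side user of such a
transform might look like this:

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

/*
 * Minimal sketch only: encrypts buf in place with AES-CBC.  len must be
 * a multiple of the AES block size (16 bytes), keylen 16, 24 or 32.
 */
static int example_cbc_encrypt(u8 *buf, unsigned int len,
			       const u8 *key, unsigned int keylen,
			       const u8 *iv)
{
	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	struct scatterlist sg;
	int err;

	/* Allocation and setkey must run in user (process) context. */
	tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_blkcipher_setkey(tfm, key, keylen);
	if (err)
		goto out;

	crypto_blkcipher_set_iv(tfm, iv, crypto_blkcipher_ivsize(tfm));

	desc.tfm = tfm;
	desc.flags = 0;
	sg_init_one(&sg, buf, len);

	/* The encrypt call itself may be made from softirq or user context. */
	err = crypto_blkcipher_encrypt(&desc, &sg, &sg, len);
out:
	crypto_free_blkcipher(tfm);
	return err;
}

A bare "aes" Cipher transform, by contrast, processes exactly one 16-byte
block per call.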
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c
index 512669691ad0..46c97058ebe1 100644
--- a/arch/s390/crypto/aes_s390.c
+++ b/arch/s390/crypto/aes_s390.c
@@ -6,6 +6,7 @@
6 * s390 Version: 6 * s390 Version:
7 * Copyright IBM Corp. 2005,2007 7 * Copyright IBM Corp. 2005,2007
8 * Author(s): Jan Glauber (jang@de.ibm.com) 8 * Author(s): Jan Glauber (jang@de.ibm.com)
 9 * Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
9 * 10 *
10 * Derived from "crypto/aes_generic.c" 11 * Derived from "crypto/aes_generic.c"
11 * 12 *
@@ -16,17 +17,13 @@
16 * 17 *
17 */ 18 */
18 19
20#include <crypto/aes.h>
19#include <crypto/algapi.h> 21#include <crypto/algapi.h>
22#include <linux/err.h>
20#include <linux/module.h> 23#include <linux/module.h>
21#include <linux/init.h> 24#include <linux/init.h>
22#include "crypt_s390.h" 25#include "crypt_s390.h"
23 26
24#define AES_MIN_KEY_SIZE 16
25#define AES_MAX_KEY_SIZE 32
26
27/* data block size for all key lengths */
28#define AES_BLOCK_SIZE 16
29
30#define AES_KEYLEN_128 1 27#define AES_KEYLEN_128 1
31#define AES_KEYLEN_192 2 28#define AES_KEYLEN_192 2
32#define AES_KEYLEN_256 4 29#define AES_KEYLEN_256 4
@@ -39,45 +36,89 @@ struct s390_aes_ctx {
39 long enc; 36 long enc;
40 long dec; 37 long dec;
41 int key_len; 38 int key_len;
39 union {
40 struct crypto_blkcipher *blk;
41 struct crypto_cipher *cip;
42 } fallback;
42}; 43};
43 44
44static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, 45/*
45 unsigned int key_len) 46 * Check if the key_len is supported by the HW.
47 * Returns 0 if it is, a positive number if it is not and software fallback is
48 * required or a negative number in case the key size is not valid
49 */
50static int need_fallback(unsigned int key_len)
46{ 51{
47 struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
48 u32 *flags = &tfm->crt_flags;
49
50 switch (key_len) { 52 switch (key_len) {
51 case 16: 53 case 16:
52 if (!(keylen_flag & AES_KEYLEN_128)) 54 if (!(keylen_flag & AES_KEYLEN_128))
53 goto fail; 55 return 1;
54 break; 56 break;
55 case 24: 57 case 24:
56 if (!(keylen_flag & AES_KEYLEN_192)) 58 if (!(keylen_flag & AES_KEYLEN_192))
57 goto fail; 59 return 1;
58
59 break; 60 break;
60 case 32: 61 case 32:
61 if (!(keylen_flag & AES_KEYLEN_256)) 62 if (!(keylen_flag & AES_KEYLEN_256))
62 goto fail; 63 return 1;
63 break; 64 break;
64 default: 65 default:
65 goto fail; 66 return -1;
66 break; 67 break;
67 } 68 }
69 return 0;
70}
71
72static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
73 unsigned int key_len)
74{
75 struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
76 int ret;
77
78 sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
79 sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
80 CRYPTO_TFM_REQ_MASK);
81
82 ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
83 if (ret) {
84 tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
85 tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
86 CRYPTO_TFM_RES_MASK);
87 }
88 return ret;
89}
90
91static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
92 unsigned int key_len)
93{
94 struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
95 u32 *flags = &tfm->crt_flags;
96 int ret;
97
98 ret = need_fallback(key_len);
99 if (ret < 0) {
100 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
101 return -EINVAL;
102 }
68 103
69 sctx->key_len = key_len; 104 sctx->key_len = key_len;
70 memcpy(sctx->key, in_key, key_len); 105 if (!ret) {
71 return 0; 106 memcpy(sctx->key, in_key, key_len);
72fail: 107 return 0;
73 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; 108 }
74 return -EINVAL; 109
110 return setkey_fallback_cip(tfm, in_key, key_len);
75} 111}
76 112
77static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) 113static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
78{ 114{
79 const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm); 115 const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
80 116
117 if (unlikely(need_fallback(sctx->key_len))) {
118 crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
119 return;
120 }
121
81 switch (sctx->key_len) { 122 switch (sctx->key_len) {
82 case 16: 123 case 16:
83 crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in, 124 crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
@@ -98,6 +139,11 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
98{ 139{
99 const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm); 140 const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
100 141
142 if (unlikely(need_fallback(sctx->key_len))) {
143 crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
144 return;
145 }
146
101 switch (sctx->key_len) { 147 switch (sctx->key_len) {
102 case 16: 148 case 16:
103 crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in, 149 crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
@@ -114,6 +160,29 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
114 } 160 }
115} 161}
116 162
163static int fallback_init_cip(struct crypto_tfm *tfm)
164{
165 const char *name = tfm->__crt_alg->cra_name;
166 struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
167
168 sctx->fallback.cip = crypto_alloc_cipher(name, 0,
169 CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
170
171 if (IS_ERR(sctx->fallback.cip)) {
172 printk(KERN_ERR "Error allocating fallback algo %s\n", name);
173 return PTR_ERR(sctx->fallback.blk);
174 }
175
176 return 0;
177}
178
179static void fallback_exit_cip(struct crypto_tfm *tfm)
180{
181 struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
182
183 crypto_free_cipher(sctx->fallback.cip);
184 sctx->fallback.cip = NULL;
185}
117 186
118static struct crypto_alg aes_alg = { 187static struct crypto_alg aes_alg = {
119 .cra_name = "aes", 188 .cra_name = "aes",
@@ -125,6 +194,8 @@ static struct crypto_alg aes_alg = {
125 .cra_ctxsize = sizeof(struct s390_aes_ctx), 194 .cra_ctxsize = sizeof(struct s390_aes_ctx),
126 .cra_module = THIS_MODULE, 195 .cra_module = THIS_MODULE,
127 .cra_list = LIST_HEAD_INIT(aes_alg.cra_list), 196 .cra_list = LIST_HEAD_INIT(aes_alg.cra_list),
197 .cra_init = fallback_init_cip,
198 .cra_exit = fallback_exit_cip,
128 .cra_u = { 199 .cra_u = {
129 .cipher = { 200 .cipher = {
130 .cia_min_keysize = AES_MIN_KEY_SIZE, 201 .cia_min_keysize = AES_MIN_KEY_SIZE,
@@ -136,10 +207,70 @@ static struct crypto_alg aes_alg = {
136 } 207 }
137}; 208};
138 209
210static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
211 unsigned int len)
212{
213 struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
214 unsigned int ret;
215
216 sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
217 sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
218 CRYPTO_TFM_REQ_MASK);
219
220 ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
221 if (ret) {
222 tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
223 tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
224 CRYPTO_TFM_RES_MASK);
225 }
226 return ret;
227}
228
229static int fallback_blk_dec(struct blkcipher_desc *desc,
230 struct scatterlist *dst, struct scatterlist *src,
231 unsigned int nbytes)
232{
233 unsigned int ret;
234 struct crypto_blkcipher *tfm;
235 struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
236
237 tfm = desc->tfm;
238 desc->tfm = sctx->fallback.blk;
239
240 ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);
241
242 desc->tfm = tfm;
243 return ret;
244}
245
246static int fallback_blk_enc(struct blkcipher_desc *desc,
247 struct scatterlist *dst, struct scatterlist *src,
248 unsigned int nbytes)
249{
250 unsigned int ret;
251 struct crypto_blkcipher *tfm;
252 struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
253
254 tfm = desc->tfm;
255 desc->tfm = sctx->fallback.blk;
256
257 ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
258
259 desc->tfm = tfm;
260 return ret;
261}
262
139static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, 263static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
140 unsigned int key_len) 264 unsigned int key_len)
141{ 265{
142 struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm); 266 struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
267 int ret;
268
269 ret = need_fallback(key_len);
270 if (ret > 0) {
271 sctx->key_len = key_len;
272 return setkey_fallback_blk(tfm, in_key, key_len);
273 }
143 274
144 switch (key_len) { 275 switch (key_len) {
145 case 16: 276 case 16:
@@ -188,6 +319,9 @@ static int ecb_aes_encrypt(struct blkcipher_desc *desc,
188 struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm); 319 struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
189 struct blkcipher_walk walk; 320 struct blkcipher_walk walk;
190 321
322 if (unlikely(need_fallback(sctx->key_len)))
323 return fallback_blk_enc(desc, dst, src, nbytes);
324
191 blkcipher_walk_init(&walk, dst, src, nbytes); 325 blkcipher_walk_init(&walk, dst, src, nbytes);
192 return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk); 326 return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
193} 327}
@@ -199,10 +333,37 @@ static int ecb_aes_decrypt(struct blkcipher_desc *desc,
199 struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm); 333 struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
200 struct blkcipher_walk walk; 334 struct blkcipher_walk walk;
201 335
336 if (unlikely(need_fallback(sctx->key_len)))
337 return fallback_blk_dec(desc, dst, src, nbytes);
338
202 blkcipher_walk_init(&walk, dst, src, nbytes); 339 blkcipher_walk_init(&walk, dst, src, nbytes);
203 return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk); 340 return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
204} 341}
205 342
343static int fallback_init_blk(struct crypto_tfm *tfm)
344{
345 const char *name = tfm->__crt_alg->cra_name;
346 struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
347
348 sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
349 CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
350
351 if (IS_ERR(sctx->fallback.blk)) {
352 printk(KERN_ERR "Error allocating fallback algo %s\n", name);
353 return PTR_ERR(sctx->fallback.blk);
354 }
355
356 return 0;
357}
358
359static void fallback_exit_blk(struct crypto_tfm *tfm)
360{
361 struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
362
363 crypto_free_blkcipher(sctx->fallback.blk);
364 sctx->fallback.blk = NULL;
365}
366
206static struct crypto_alg ecb_aes_alg = { 367static struct crypto_alg ecb_aes_alg = {
207 .cra_name = "ecb(aes)", 368 .cra_name = "ecb(aes)",
208 .cra_driver_name = "ecb-aes-s390", 369 .cra_driver_name = "ecb-aes-s390",
@@ -214,6 +375,8 @@ static struct crypto_alg ecb_aes_alg = {
214 .cra_type = &crypto_blkcipher_type, 375 .cra_type = &crypto_blkcipher_type,
215 .cra_module = THIS_MODULE, 376 .cra_module = THIS_MODULE,
216 .cra_list = LIST_HEAD_INIT(ecb_aes_alg.cra_list), 377 .cra_list = LIST_HEAD_INIT(ecb_aes_alg.cra_list),
378 .cra_init = fallback_init_blk,
379 .cra_exit = fallback_exit_blk,
217 .cra_u = { 380 .cra_u = {
218 .blkcipher = { 381 .blkcipher = {
219 .min_keysize = AES_MIN_KEY_SIZE, 382 .min_keysize = AES_MIN_KEY_SIZE,
@@ -229,6 +392,13 @@ static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
229 unsigned int key_len) 392 unsigned int key_len)
230{ 393{
231 struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm); 394 struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
395 int ret;
396
397 ret = need_fallback(key_len);
398 if (ret > 0) {
399 sctx->key_len = key_len;
400 return setkey_fallback_blk(tfm, in_key, key_len);
401 }
232 402
233 switch (key_len) { 403 switch (key_len) {
234 case 16: 404 case 16:
@@ -283,6 +453,9 @@ static int cbc_aes_encrypt(struct blkcipher_desc *desc,
283 struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm); 453 struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
284 struct blkcipher_walk walk; 454 struct blkcipher_walk walk;
285 455
456 if (unlikely(need_fallback(sctx->key_len)))
457 return fallback_blk_enc(desc, dst, src, nbytes);
458
286 blkcipher_walk_init(&walk, dst, src, nbytes); 459 blkcipher_walk_init(&walk, dst, src, nbytes);
287 return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk); 460 return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
288} 461}
@@ -294,6 +467,9 @@ static int cbc_aes_decrypt(struct blkcipher_desc *desc,
294 struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm); 467 struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
295 struct blkcipher_walk walk; 468 struct blkcipher_walk walk;
296 469
470 if (unlikely(need_fallback(sctx->key_len)))
471 return fallback_blk_dec(desc, dst, src, nbytes);
472
297 blkcipher_walk_init(&walk, dst, src, nbytes); 473 blkcipher_walk_init(&walk, dst, src, nbytes);
298 return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk); 474 return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
299} 475}
@@ -309,6 +485,8 @@ static struct crypto_alg cbc_aes_alg = {
309 .cra_type = &crypto_blkcipher_type, 485 .cra_type = &crypto_blkcipher_type,
310 .cra_module = THIS_MODULE, 486 .cra_module = THIS_MODULE,
311 .cra_list = LIST_HEAD_INIT(cbc_aes_alg.cra_list), 487 .cra_list = LIST_HEAD_INIT(cbc_aes_alg.cra_list),
488 .cra_init = fallback_init_blk,
489 .cra_exit = fallback_exit_blk,
312 .cra_u = { 490 .cra_u = {
313 .blkcipher = { 491 .blkcipher = {
314 .min_keysize = AES_MIN_KEY_SIZE, 492 .min_keysize = AES_MIN_KEY_SIZE,
@@ -336,14 +514,10 @@ static int __init aes_init(void)
336 return -EOPNOTSUPP; 514 return -EOPNOTSUPP;
337 515
338 /* z9 109 and z9 BC/EC only support 128 bit key length */ 516 /* z9 109 and z9 BC/EC only support 128 bit key length */
339 if (keylen_flag == AES_KEYLEN_128) { 517 if (keylen_flag == AES_KEYLEN_128)
340 aes_alg.cra_u.cipher.cia_max_keysize = AES_MIN_KEY_SIZE;
341 ecb_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
342 cbc_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
343 printk(KERN_INFO 518 printk(KERN_INFO
344 "aes_s390: hardware acceleration only available for" 519 "aes_s390: hardware acceleration only available for"
345 "128 bit keys\n"); 520 "128 bit keys\n");
346 }
347 521
348 ret = crypto_register_alg(&aes_alg); 522 ret = crypto_register_alg(&aes_alg);
349 if (ret) 523 if (ret)
@@ -382,4 +556,3 @@ MODULE_ALIAS("aes");
382 556
383MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm"); 557MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
384MODULE_LICENSE("GPL"); 558MODULE_LICENSE("GPL");
385
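
The aes_s390.c changes above add a software fallback for key lengths the CP
Assist instructions cannot handle (192/256-bit keys on z9). The essential
trick, condensed into the illustrative sketch below (alloc_sw_fallback() is a
hypothetical helper, not code from the patch), is to request the same
algorithm by cra_name while putting CRYPTO_ALG_NEED_FALLBACK into the lookup
mask: that bit is clear in the type argument, so any algorithm that itself
sets CRYPTO_ALG_NEED_FALLBACK in cra_flags (as hardware drivers needing a
fallback are expected to do) is excluded and the core hands back a pure
software implementation. CRYPTO_ALG_ASYNC in the mask likewise restricts the
lookup to synchronous implementations.

/*
 * Illustrative sketch only.  Looking up the algorithm by cra_name
 * ("aes", "ecb(aes)", "cbc(aes)") with CRYPTO_ALG_NEED_FALLBACK in the
 * mask guarantees the driver never gets itself back as its own fallback.
 */
static struct crypto_blkcipher *alloc_sw_fallback(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;

	return crypto_alloc_blkcipher(name, 0,
				      CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
}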
diff --git a/arch/x86/crypto/Makefile b/arch/x86/crypto/Makefile
index 46bb609e2444..3874c2de5403 100644
--- a/arch/x86/crypto/Makefile
+++ b/arch/x86/crypto/Makefile
@@ -4,12 +4,16 @@
 
 obj-$(CONFIG_CRYPTO_AES_586) += aes-i586.o
 obj-$(CONFIG_CRYPTO_TWOFISH_586) += twofish-i586.o
+obj-$(CONFIG_CRYPTO_SALSA20_586) += salsa20-i586.o
 
 obj-$(CONFIG_CRYPTO_AES_X86_64) += aes-x86_64.o
 obj-$(CONFIG_CRYPTO_TWOFISH_X86_64) += twofish-x86_64.o
+obj-$(CONFIG_CRYPTO_SALSA20_X86_64) += salsa20-x86_64.o
 
-aes-i586-y := aes-i586-asm_32.o aes_32.o
-twofish-i586-y := twofish-i586-asm_32.o twofish_32.o
+aes-i586-y := aes-i586-asm_32.o aes_glue.o
+twofish-i586-y := twofish-i586-asm_32.o twofish_glue.o
+salsa20-i586-y := salsa20-i586-asm_32.o salsa20_glue.o
 
-aes-x86_64-y := aes-x86_64-asm_64.o aes_64.o
-twofish-x86_64-y := twofish-x86_64-asm_64.o twofish_64.o
+aes-x86_64-y := aes-x86_64-asm_64.o aes_glue.o
+twofish-x86_64-y := twofish-x86_64-asm_64.o twofish_glue.o
+salsa20-x86_64-y := salsa20-x86_64-asm_64.o salsa20_glue.o
diff --git a/arch/x86/crypto/aes-i586-asm_32.S b/arch/x86/crypto/aes-i586-asm_32.S
index f942f0c8f630..1093bede3e0a 100644
--- a/arch/x86/crypto/aes-i586-asm_32.S
+++ b/arch/x86/crypto/aes-i586-asm_32.S
@@ -46,9 +46,9 @@
46#define in_blk 16 46#define in_blk 16
47 47
48/* offsets in crypto_tfm structure */ 48/* offsets in crypto_tfm structure */
49#define ekey (crypto_tfm_ctx_offset + 0) 49#define klen (crypto_tfm_ctx_offset + 0)
50#define nrnd (crypto_tfm_ctx_offset + 256) 50#define ekey (crypto_tfm_ctx_offset + 4)
51#define dkey (crypto_tfm_ctx_offset + 260) 51#define dkey (crypto_tfm_ctx_offset + 244)
52 52
53// register mapping for encrypt and decrypt subroutines 53// register mapping for encrypt and decrypt subroutines
54 54
@@ -221,8 +221,8 @@
221 221
222.global aes_enc_blk 222.global aes_enc_blk
223 223
224.extern ft_tab 224.extern crypto_ft_tab
225.extern fl_tab 225.extern crypto_fl_tab
226 226
227.align 4 227.align 4
228 228
@@ -236,7 +236,7 @@ aes_enc_blk:
2361: push %ebx 2361: push %ebx
237 mov in_blk+4(%esp),%r2 237 mov in_blk+4(%esp),%r2
238 push %esi 238 push %esi
239 mov nrnd(%ebp),%r3 // number of rounds 239 mov klen(%ebp),%r3 // key size
240 push %edi 240 push %edi
241#if ekey != 0 241#if ekey != 0
242 lea ekey(%ebp),%ebp // key pointer 242 lea ekey(%ebp),%ebp // key pointer
@@ -255,26 +255,26 @@ aes_enc_blk:
255 255
256 sub $8,%esp // space for register saves on stack 256 sub $8,%esp // space for register saves on stack
257 add $16,%ebp // increment to next round key 257 add $16,%ebp // increment to next round key
258 cmp $12,%r3 258 cmp $24,%r3
259 jb 4f // 10 rounds for 128-bit key 259 jb 4f // 10 rounds for 128-bit key
260 lea 32(%ebp),%ebp 260 lea 32(%ebp),%ebp
261 je 3f // 12 rounds for 192-bit key 261 je 3f // 12 rounds for 192-bit key
262 lea 32(%ebp),%ebp 262 lea 32(%ebp),%ebp
263 263
2642: fwd_rnd1( -64(%ebp) ,ft_tab) // 14 rounds for 256-bit key 2642: fwd_rnd1( -64(%ebp), crypto_ft_tab) // 14 rounds for 256-bit key
265 fwd_rnd2( -48(%ebp) ,ft_tab) 265 fwd_rnd2( -48(%ebp), crypto_ft_tab)
2663: fwd_rnd1( -32(%ebp) ,ft_tab) // 12 rounds for 192-bit key 2663: fwd_rnd1( -32(%ebp), crypto_ft_tab) // 12 rounds for 192-bit key
267 fwd_rnd2( -16(%ebp) ,ft_tab) 267 fwd_rnd2( -16(%ebp), crypto_ft_tab)
2684: fwd_rnd1( (%ebp) ,ft_tab) // 10 rounds for 128-bit key 2684: fwd_rnd1( (%ebp), crypto_ft_tab) // 10 rounds for 128-bit key
269 fwd_rnd2( +16(%ebp) ,ft_tab) 269 fwd_rnd2( +16(%ebp), crypto_ft_tab)
270 fwd_rnd1( +32(%ebp) ,ft_tab) 270 fwd_rnd1( +32(%ebp), crypto_ft_tab)
271 fwd_rnd2( +48(%ebp) ,ft_tab) 271 fwd_rnd2( +48(%ebp), crypto_ft_tab)
272 fwd_rnd1( +64(%ebp) ,ft_tab) 272 fwd_rnd1( +64(%ebp), crypto_ft_tab)
273 fwd_rnd2( +80(%ebp) ,ft_tab) 273 fwd_rnd2( +80(%ebp), crypto_ft_tab)
274 fwd_rnd1( +96(%ebp) ,ft_tab) 274 fwd_rnd1( +96(%ebp), crypto_ft_tab)
275 fwd_rnd2(+112(%ebp) ,ft_tab) 275 fwd_rnd2(+112(%ebp), crypto_ft_tab)
276 fwd_rnd1(+128(%ebp) ,ft_tab) 276 fwd_rnd1(+128(%ebp), crypto_ft_tab)
277 fwd_rnd2(+144(%ebp) ,fl_tab) // last round uses a different table 277 fwd_rnd2(+144(%ebp), crypto_fl_tab) // last round uses a different table
278 278
279// move final values to the output array. CAUTION: the 279// move final values to the output array. CAUTION: the
280// order of these assigns rely on the register mappings 280// order of these assigns rely on the register mappings
@@ -297,8 +297,8 @@ aes_enc_blk:
297 297
298.global aes_dec_blk 298.global aes_dec_blk
299 299
300.extern it_tab 300.extern crypto_it_tab
301.extern il_tab 301.extern crypto_il_tab
302 302
303.align 4 303.align 4
304 304
@@ -312,14 +312,11 @@ aes_dec_blk:
3121: push %ebx 3121: push %ebx
313 mov in_blk+4(%esp),%r2 313 mov in_blk+4(%esp),%r2
314 push %esi 314 push %esi
315 mov nrnd(%ebp),%r3 // number of rounds 315 mov klen(%ebp),%r3 // key size
316 push %edi 316 push %edi
317#if dkey != 0 317#if dkey != 0
318 lea dkey(%ebp),%ebp // key pointer 318 lea dkey(%ebp),%ebp // key pointer
319#endif 319#endif
320 mov %r3,%r0
321 shl $4,%r0
322 add %r0,%ebp
323 320
324// input four columns and xor in first round key 321// input four columns and xor in first round key
325 322
@@ -333,27 +330,27 @@ aes_dec_blk:
333 xor 12(%ebp),%r5 330 xor 12(%ebp),%r5
334 331
335 sub $8,%esp // space for register saves on stack 332 sub $8,%esp // space for register saves on stack
336 sub $16,%ebp // increment to next round key 333 add $16,%ebp // increment to next round key
337 cmp $12,%r3 334 cmp $24,%r3
338 jb 4f // 10 rounds for 128-bit key 335 jb 4f // 10 rounds for 128-bit key
339 lea -32(%ebp),%ebp 336 lea 32(%ebp),%ebp
340 je 3f // 12 rounds for 192-bit key 337 je 3f // 12 rounds for 192-bit key
341 lea -32(%ebp),%ebp 338 lea 32(%ebp),%ebp
342 339
3432: inv_rnd1( +64(%ebp), it_tab) // 14 rounds for 256-bit key 3402: inv_rnd1( -64(%ebp), crypto_it_tab) // 14 rounds for 256-bit key
344 inv_rnd2( +48(%ebp), it_tab) 341 inv_rnd2( -48(%ebp), crypto_it_tab)
3453: inv_rnd1( +32(%ebp), it_tab) // 12 rounds for 192-bit key 3423: inv_rnd1( -32(%ebp), crypto_it_tab) // 12 rounds for 192-bit key
346 inv_rnd2( +16(%ebp), it_tab) 343 inv_rnd2( -16(%ebp), crypto_it_tab)
3474: inv_rnd1( (%ebp), it_tab) // 10 rounds for 128-bit key 3444: inv_rnd1( (%ebp), crypto_it_tab) // 10 rounds for 128-bit key
348 inv_rnd2( -16(%ebp), it_tab) 345 inv_rnd2( +16(%ebp), crypto_it_tab)
349 inv_rnd1( -32(%ebp), it_tab) 346 inv_rnd1( +32(%ebp), crypto_it_tab)
350 inv_rnd2( -48(%ebp), it_tab) 347 inv_rnd2( +48(%ebp), crypto_it_tab)
351 inv_rnd1( -64(%ebp), it_tab) 348 inv_rnd1( +64(%ebp), crypto_it_tab)
352 inv_rnd2( -80(%ebp), it_tab) 349 inv_rnd2( +80(%ebp), crypto_it_tab)
353 inv_rnd1( -96(%ebp), it_tab) 350 inv_rnd1( +96(%ebp), crypto_it_tab)
354 inv_rnd2(-112(%ebp), it_tab) 351 inv_rnd2(+112(%ebp), crypto_it_tab)
355 inv_rnd1(-128(%ebp), it_tab) 352 inv_rnd1(+128(%ebp), crypto_it_tab)
356 inv_rnd2(-144(%ebp), il_tab) // last round uses a different table 353 inv_rnd2(+144(%ebp), crypto_il_tab) // last round uses a different table
357 354
358// move final values to the output array. CAUTION: the 355// move final values to the output array. CAUTION: the
359// order of these assigns rely on the register mappings 356// order of these assigns rely on the register mappings
diff --git a/arch/x86/crypto/aes-x86_64-asm_64.S b/arch/x86/crypto/aes-x86_64-asm_64.S
index 26b40de4d0b0..a120f526c3df 100644
--- a/arch/x86/crypto/aes-x86_64-asm_64.S
+++ b/arch/x86/crypto/aes-x86_64-asm_64.S
@@ -8,10 +8,10 @@
8 * including this sentence is retained in full. 8 * including this sentence is retained in full.
9 */ 9 */
10 10
11.extern aes_ft_tab 11.extern crypto_ft_tab
12.extern aes_it_tab 12.extern crypto_it_tab
13.extern aes_fl_tab 13.extern crypto_fl_tab
14.extern aes_il_tab 14.extern crypto_il_tab
15 15
16.text 16.text
17 17
@@ -56,13 +56,13 @@
56 .align 8; \ 56 .align 8; \
57FUNC: movq r1,r2; \ 57FUNC: movq r1,r2; \
58 movq r3,r4; \ 58 movq r3,r4; \
59 leaq BASE+KEY+52(r8),r9; \ 59 leaq BASE+KEY+48+4(r8),r9; \
60 movq r10,r11; \ 60 movq r10,r11; \
61 movl (r7),r5 ## E; \ 61 movl (r7),r5 ## E; \
62 movl 4(r7),r1 ## E; \ 62 movl 4(r7),r1 ## E; \
63 movl 8(r7),r6 ## E; \ 63 movl 8(r7),r6 ## E; \
64 movl 12(r7),r7 ## E; \ 64 movl 12(r7),r7 ## E; \
65 movl BASE(r8),r10 ## E; \ 65 movl BASE+0(r8),r10 ## E; \
66 xorl -48(r9),r5 ## E; \ 66 xorl -48(r9),r5 ## E; \
67 xorl -44(r9),r1 ## E; \ 67 xorl -44(r9),r1 ## E; \
68 xorl -40(r9),r6 ## E; \ 68 xorl -40(r9),r6 ## E; \
@@ -154,37 +154,37 @@ FUNC: movq r1,r2; \
154/* void aes_enc_blk(stuct crypto_tfm *tfm, u8 *out, const u8 *in) */ 154/* void aes_enc_blk(stuct crypto_tfm *tfm, u8 *out, const u8 *in) */
155 155
156 entry(aes_enc_blk,0,enc128,enc192) 156 entry(aes_enc_blk,0,enc128,enc192)
157 encrypt_round(aes_ft_tab,-96) 157 encrypt_round(crypto_ft_tab,-96)
158 encrypt_round(aes_ft_tab,-80) 158 encrypt_round(crypto_ft_tab,-80)
159enc192: encrypt_round(aes_ft_tab,-64) 159enc192: encrypt_round(crypto_ft_tab,-64)
160 encrypt_round(aes_ft_tab,-48) 160 encrypt_round(crypto_ft_tab,-48)
161enc128: encrypt_round(aes_ft_tab,-32) 161enc128: encrypt_round(crypto_ft_tab,-32)
162 encrypt_round(aes_ft_tab,-16) 162 encrypt_round(crypto_ft_tab,-16)
163 encrypt_round(aes_ft_tab, 0) 163 encrypt_round(crypto_ft_tab, 0)
164 encrypt_round(aes_ft_tab, 16) 164 encrypt_round(crypto_ft_tab, 16)
165 encrypt_round(aes_ft_tab, 32) 165 encrypt_round(crypto_ft_tab, 32)
166 encrypt_round(aes_ft_tab, 48) 166 encrypt_round(crypto_ft_tab, 48)
167 encrypt_round(aes_ft_tab, 64) 167 encrypt_round(crypto_ft_tab, 64)
168 encrypt_round(aes_ft_tab, 80) 168 encrypt_round(crypto_ft_tab, 80)
169 encrypt_round(aes_ft_tab, 96) 169 encrypt_round(crypto_ft_tab, 96)
170 encrypt_final(aes_fl_tab,112) 170 encrypt_final(crypto_fl_tab,112)
171 return 171 return
172 172
173/* void aes_dec_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in) */ 173/* void aes_dec_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in) */
174 174
175 entry(aes_dec_blk,240,dec128,dec192) 175 entry(aes_dec_blk,240,dec128,dec192)
176 decrypt_round(aes_it_tab,-96) 176 decrypt_round(crypto_it_tab,-96)
177 decrypt_round(aes_it_tab,-80) 177 decrypt_round(crypto_it_tab,-80)
178dec192: decrypt_round(aes_it_tab,-64) 178dec192: decrypt_round(crypto_it_tab,-64)
179 decrypt_round(aes_it_tab,-48) 179 decrypt_round(crypto_it_tab,-48)
180dec128: decrypt_round(aes_it_tab,-32) 180dec128: decrypt_round(crypto_it_tab,-32)
181 decrypt_round(aes_it_tab,-16) 181 decrypt_round(crypto_it_tab,-16)
182 decrypt_round(aes_it_tab, 0) 182 decrypt_round(crypto_it_tab, 0)
183 decrypt_round(aes_it_tab, 16) 183 decrypt_round(crypto_it_tab, 16)
184 decrypt_round(aes_it_tab, 32) 184 decrypt_round(crypto_it_tab, 32)
185 decrypt_round(aes_it_tab, 48) 185 decrypt_round(crypto_it_tab, 48)
186 decrypt_round(aes_it_tab, 64) 186 decrypt_round(crypto_it_tab, 64)
187 decrypt_round(aes_it_tab, 80) 187 decrypt_round(crypto_it_tab, 80)
188 decrypt_round(aes_it_tab, 96) 188 decrypt_round(crypto_it_tab, 96)
189 decrypt_final(aes_il_tab,112) 189 decrypt_final(crypto_il_tab,112)
190 return 190 return
diff --git a/arch/x86/crypto/aes_32.c b/arch/x86/crypto/aes_32.c
deleted file mode 100644
index 49aad9397f10..000000000000
--- a/arch/x86/crypto/aes_32.c
+++ /dev/null
@@ -1,515 +0,0 @@
1/*
2 *
3 * Glue Code for optimized 586 assembler version of AES
4 *
5 * Copyright (c) 2002, Dr Brian Gladman <>, Worcester, UK.
6 * All rights reserved.
7 *
8 * LICENSE TERMS
9 *
10 * The free distribution and use of this software in both source and binary
11 * form is allowed (with or without changes) provided that:
12 *
13 * 1. distributions of this source code include the above copyright
14 * notice, this list of conditions and the following disclaimer;
15 *
16 * 2. distributions in binary form include the above copyright
17 * notice, this list of conditions and the following disclaimer
18 * in the documentation and/or other associated materials;
19 *
20 * 3. the copyright holder's name is not used to endorse products
21 * built using this software without specific written permission.
22 *
23 * ALTERNATIVELY, provided that this notice is retained in full, this product
24 * may be distributed under the terms of the GNU General Public License (GPL),
25 * in which case the provisions of the GPL apply INSTEAD OF those given above.
26 *
27 * DISCLAIMER
28 *
29 * This software is provided 'as is' with no explicit or implied warranties
30 * in respect of its properties, including, but not limited to, correctness
31 * and/or fitness for purpose.
32 *
33 * Copyright (c) 2003, Adam J. Richter <adam@yggdrasil.com> (conversion to
34 * 2.5 API).
35 * Copyright (c) 2003, 2004 Fruhwirth Clemens <clemens@endorphin.org>
36 * Copyright (c) 2004 Red Hat, Inc., James Morris <jmorris@redhat.com>
37 *
38 */
39
40#include <asm/byteorder.h>
41#include <linux/kernel.h>
42#include <linux/module.h>
43#include <linux/init.h>
44#include <linux/types.h>
45#include <linux/crypto.h>
46#include <linux/linkage.h>
47
48asmlinkage void aes_enc_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
49asmlinkage void aes_dec_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
50
51#define AES_MIN_KEY_SIZE 16
52#define AES_MAX_KEY_SIZE 32
53#define AES_BLOCK_SIZE 16
54#define AES_KS_LENGTH 4 * AES_BLOCK_SIZE
55#define RC_LENGTH 29
56
57struct aes_ctx {
58 u32 ekey[AES_KS_LENGTH];
59 u32 rounds;
60 u32 dkey[AES_KS_LENGTH];
61};
62
63#define WPOLY 0x011b
64#define bytes2word(b0, b1, b2, b3) \
65 (((u32)(b3) << 24) | ((u32)(b2) << 16) | ((u32)(b1) << 8) | (b0))
66
67/* define the finite field multiplies required for Rijndael */
68#define f2(x) ((x) ? pow[log[x] + 0x19] : 0)
69#define f3(x) ((x) ? pow[log[x] + 0x01] : 0)
70#define f9(x) ((x) ? pow[log[x] + 0xc7] : 0)
71#define fb(x) ((x) ? pow[log[x] + 0x68] : 0)
72#define fd(x) ((x) ? pow[log[x] + 0xee] : 0)
73#define fe(x) ((x) ? pow[log[x] + 0xdf] : 0)
74#define fi(x) ((x) ? pow[255 - log[x]]: 0)
75
76static inline u32 upr(u32 x, int n)
77{
78 return (x << 8 * n) | (x >> (32 - 8 * n));
79}
80
81static inline u8 bval(u32 x, int n)
82{
83 return x >> 8 * n;
84}
85
86/* The forward and inverse affine transformations used in the S-box */
87#define fwd_affine(x) \
88 (w = (u32)x, w ^= (w<<1)^(w<<2)^(w<<3)^(w<<4), 0x63^(u8)(w^(w>>8)))
89
90#define inv_affine(x) \
91 (w = (u32)x, w = (w<<1)^(w<<3)^(w<<6), 0x05^(u8)(w^(w>>8)))
92
93static u32 rcon_tab[RC_LENGTH];
94
95u32 ft_tab[4][256];
96u32 fl_tab[4][256];
97static u32 im_tab[4][256];
98u32 il_tab[4][256];
99u32 it_tab[4][256];
100
101static void gen_tabs(void)
102{
103 u32 i, w;
104 u8 pow[512], log[256];
105
106 /*
107 * log and power tables for GF(2^8) finite field with
108 * WPOLY as modular polynomial - the simplest primitive
109 * root is 0x03, used here to generate the tables.
110 */
111 i = 0; w = 1;
112
113 do {
114 pow[i] = (u8)w;
115 pow[i + 255] = (u8)w;
116 log[w] = (u8)i++;
117 w ^= (w << 1) ^ (w & 0x80 ? WPOLY : 0);
118 } while (w != 1);
119
120 for(i = 0, w = 1; i < RC_LENGTH; ++i) {
121 rcon_tab[i] = bytes2word(w, 0, 0, 0);
122 w = f2(w);
123 }
124
125 for(i = 0; i < 256; ++i) {
126 u8 b;
127
128 b = fwd_affine(fi((u8)i));
129 w = bytes2word(f2(b), b, b, f3(b));
130
131 /* tables for a normal encryption round */
132 ft_tab[0][i] = w;
133 ft_tab[1][i] = upr(w, 1);
134 ft_tab[2][i] = upr(w, 2);
135 ft_tab[3][i] = upr(w, 3);
136 w = bytes2word(b, 0, 0, 0);
137
138 /*
139 * tables for last encryption round
140 * (may also be used in the key schedule)
141 */
142 fl_tab[0][i] = w;
143 fl_tab[1][i] = upr(w, 1);
144 fl_tab[2][i] = upr(w, 2);
145 fl_tab[3][i] = upr(w, 3);
146
147 b = fi(inv_affine((u8)i));
148 w = bytes2word(fe(b), f9(b), fd(b), fb(b));
149
150 /* tables for the inverse mix column operation */
151 im_tab[0][b] = w;
152 im_tab[1][b] = upr(w, 1);
153 im_tab[2][b] = upr(w, 2);
154 im_tab[3][b] = upr(w, 3);
155
156 /* tables for a normal decryption round */
157 it_tab[0][i] = w;
158 it_tab[1][i] = upr(w,1);
159 it_tab[2][i] = upr(w,2);
160 it_tab[3][i] = upr(w,3);
161
162 w = bytes2word(b, 0, 0, 0);
163
164 /* tables for last decryption round */
165 il_tab[0][i] = w;
166 il_tab[1][i] = upr(w,1);
167 il_tab[2][i] = upr(w,2);
168 il_tab[3][i] = upr(w,3);
169 }
170}
171
172#define four_tables(x,tab,vf,rf,c) \
173( tab[0][bval(vf(x,0,c),rf(0,c))] ^ \
174 tab[1][bval(vf(x,1,c),rf(1,c))] ^ \
175 tab[2][bval(vf(x,2,c),rf(2,c))] ^ \
176 tab[3][bval(vf(x,3,c),rf(3,c))] \
177)
178
179#define vf1(x,r,c) (x)
180#define rf1(r,c) (r)
181#define rf2(r,c) ((r-c)&3)
182
183#define inv_mcol(x) four_tables(x,im_tab,vf1,rf1,0)
184#define ls_box(x,c) four_tables(x,fl_tab,vf1,rf2,c)
185
186#define ff(x) inv_mcol(x)
187
188#define ke4(k,i) \
189{ \
190 k[4*(i)+4] = ss[0] ^= ls_box(ss[3],3) ^ rcon_tab[i]; \
191 k[4*(i)+5] = ss[1] ^= ss[0]; \
192 k[4*(i)+6] = ss[2] ^= ss[1]; \
193 k[4*(i)+7] = ss[3] ^= ss[2]; \
194}
195
196#define kel4(k,i) \
197{ \
198 k[4*(i)+4] = ss[0] ^= ls_box(ss[3],3) ^ rcon_tab[i]; \
199 k[4*(i)+5] = ss[1] ^= ss[0]; \
200 k[4*(i)+6] = ss[2] ^= ss[1]; k[4*(i)+7] = ss[3] ^= ss[2]; \
201}
202
203#define ke6(k,i) \
204{ \
205 k[6*(i)+ 6] = ss[0] ^= ls_box(ss[5],3) ^ rcon_tab[i]; \
206 k[6*(i)+ 7] = ss[1] ^= ss[0]; \
207 k[6*(i)+ 8] = ss[2] ^= ss[1]; \
208 k[6*(i)+ 9] = ss[3] ^= ss[2]; \
209 k[6*(i)+10] = ss[4] ^= ss[3]; \
210 k[6*(i)+11] = ss[5] ^= ss[4]; \
211}
212
213#define kel6(k,i) \
214{ \
215 k[6*(i)+ 6] = ss[0] ^= ls_box(ss[5],3) ^ rcon_tab[i]; \
216 k[6*(i)+ 7] = ss[1] ^= ss[0]; \
217 k[6*(i)+ 8] = ss[2] ^= ss[1]; \
218 k[6*(i)+ 9] = ss[3] ^= ss[2]; \
219}
220
221#define ke8(k,i) \
222{ \
223 k[8*(i)+ 8] = ss[0] ^= ls_box(ss[7],3) ^ rcon_tab[i]; \
224 k[8*(i)+ 9] = ss[1] ^= ss[0]; \
225 k[8*(i)+10] = ss[2] ^= ss[1]; \
226 k[8*(i)+11] = ss[3] ^= ss[2]; \
227 k[8*(i)+12] = ss[4] ^= ls_box(ss[3],0); \
228 k[8*(i)+13] = ss[5] ^= ss[4]; \
229 k[8*(i)+14] = ss[6] ^= ss[5]; \
230 k[8*(i)+15] = ss[7] ^= ss[6]; \
231}
232
233#define kel8(k,i) \
234{ \
235 k[8*(i)+ 8] = ss[0] ^= ls_box(ss[7],3) ^ rcon_tab[i]; \
236 k[8*(i)+ 9] = ss[1] ^= ss[0]; \
237 k[8*(i)+10] = ss[2] ^= ss[1]; \
238 k[8*(i)+11] = ss[3] ^= ss[2]; \
239}
240
241#define kdf4(k,i) \
242{ \
243 ss[0] = ss[0] ^ ss[2] ^ ss[1] ^ ss[3]; \
244 ss[1] = ss[1] ^ ss[3]; \
245 ss[2] = ss[2] ^ ss[3]; \
246 ss[3] = ss[3]; \
247 ss[4] = ls_box(ss[(i+3) % 4], 3) ^ rcon_tab[i]; \
248 ss[i % 4] ^= ss[4]; \
249 ss[4] ^= k[4*(i)]; \
250 k[4*(i)+4] = ff(ss[4]); \
251 ss[4] ^= k[4*(i)+1]; \
252 k[4*(i)+5] = ff(ss[4]); \
253 ss[4] ^= k[4*(i)+2]; \
254 k[4*(i)+6] = ff(ss[4]); \
255 ss[4] ^= k[4*(i)+3]; \
256 k[4*(i)+7] = ff(ss[4]); \
257}
258
259#define kd4(k,i) \
260{ \
261 ss[4] = ls_box(ss[(i+3) % 4], 3) ^ rcon_tab[i]; \
262 ss[i % 4] ^= ss[4]; \
263 ss[4] = ff(ss[4]); \
264 k[4*(i)+4] = ss[4] ^= k[4*(i)]; \
265 k[4*(i)+5] = ss[4] ^= k[4*(i)+1]; \
266 k[4*(i)+6] = ss[4] ^= k[4*(i)+2]; \
267 k[4*(i)+7] = ss[4] ^= k[4*(i)+3]; \
268}
269
270#define kdl4(k,i) \
271{ \
272 ss[4] = ls_box(ss[(i+3) % 4], 3) ^ rcon_tab[i]; \
273 ss[i % 4] ^= ss[4]; \
274 k[4*(i)+4] = (ss[0] ^= ss[1]) ^ ss[2] ^ ss[3]; \
275 k[4*(i)+5] = ss[1] ^ ss[3]; \
276 k[4*(i)+6] = ss[0]; \
277 k[4*(i)+7] = ss[1]; \
278}
279
280#define kdf6(k,i) \
281{ \
282 ss[0] ^= ls_box(ss[5],3) ^ rcon_tab[i]; \
283 k[6*(i)+ 6] = ff(ss[0]); \
284 ss[1] ^= ss[0]; \
285 k[6*(i)+ 7] = ff(ss[1]); \
286 ss[2] ^= ss[1]; \
287 k[6*(i)+ 8] = ff(ss[2]); \
288 ss[3] ^= ss[2]; \
289 k[6*(i)+ 9] = ff(ss[3]); \
290 ss[4] ^= ss[3]; \
291 k[6*(i)+10] = ff(ss[4]); \
292 ss[5] ^= ss[4]; \
293 k[6*(i)+11] = ff(ss[5]); \
294}
295
296#define kd6(k,i) \
297{ \
298 ss[6] = ls_box(ss[5],3) ^ rcon_tab[i]; \
299 ss[0] ^= ss[6]; ss[6] = ff(ss[6]); \
300 k[6*(i)+ 6] = ss[6] ^= k[6*(i)]; \
301 ss[1] ^= ss[0]; \
302 k[6*(i)+ 7] = ss[6] ^= k[6*(i)+ 1]; \
303 ss[2] ^= ss[1]; \
304 k[6*(i)+ 8] = ss[6] ^= k[6*(i)+ 2]; \
305 ss[3] ^= ss[2]; \
306 k[6*(i)+ 9] = ss[6] ^= k[6*(i)+ 3]; \
307 ss[4] ^= ss[3]; \
308 k[6*(i)+10] = ss[6] ^= k[6*(i)+ 4]; \
309 ss[5] ^= ss[4]; \
310 k[6*(i)+11] = ss[6] ^= k[6*(i)+ 5]; \
311}
312
313#define kdl6(k,i) \
314{ \
315 ss[0] ^= ls_box(ss[5],3) ^ rcon_tab[i]; \
316 k[6*(i)+ 6] = ss[0]; \
317 ss[1] ^= ss[0]; \
318 k[6*(i)+ 7] = ss[1]; \
319 ss[2] ^= ss[1]; \
320 k[6*(i)+ 8] = ss[2]; \
321 ss[3] ^= ss[2]; \
322 k[6*(i)+ 9] = ss[3]; \
323}
324
325#define kdf8(k,i) \
326{ \
327 ss[0] ^= ls_box(ss[7],3) ^ rcon_tab[i]; \
328 k[8*(i)+ 8] = ff(ss[0]); \
329 ss[1] ^= ss[0]; \
330 k[8*(i)+ 9] = ff(ss[1]); \
331 ss[2] ^= ss[1]; \
332 k[8*(i)+10] = ff(ss[2]); \
333 ss[3] ^= ss[2]; \
334 k[8*(i)+11] = ff(ss[3]); \
335 ss[4] ^= ls_box(ss[3],0); \
336 k[8*(i)+12] = ff(ss[4]); \
337 ss[5] ^= ss[4]; \
338 k[8*(i)+13] = ff(ss[5]); \
339 ss[6] ^= ss[5]; \
340 k[8*(i)+14] = ff(ss[6]); \
341 ss[7] ^= ss[6]; \
342 k[8*(i)+15] = ff(ss[7]); \
343}
344
345#define kd8(k,i) \
346{ \
347 u32 __g = ls_box(ss[7],3) ^ rcon_tab[i]; \
348 ss[0] ^= __g; \
349 __g = ff(__g); \
350 k[8*(i)+ 8] = __g ^= k[8*(i)]; \
351 ss[1] ^= ss[0]; \
352 k[8*(i)+ 9] = __g ^= k[8*(i)+ 1]; \
353 ss[2] ^= ss[1]; \
354 k[8*(i)+10] = __g ^= k[8*(i)+ 2]; \
355 ss[3] ^= ss[2]; \
356 k[8*(i)+11] = __g ^= k[8*(i)+ 3]; \
357 __g = ls_box(ss[3],0); \
358 ss[4] ^= __g; \
359 __g = ff(__g); \
360 k[8*(i)+12] = __g ^= k[8*(i)+ 4]; \
361 ss[5] ^= ss[4]; \
362 k[8*(i)+13] = __g ^= k[8*(i)+ 5]; \
363 ss[6] ^= ss[5]; \
364 k[8*(i)+14] = __g ^= k[8*(i)+ 6]; \
365 ss[7] ^= ss[6]; \
366 k[8*(i)+15] = __g ^= k[8*(i)+ 7]; \
367}
368
369#define kdl8(k,i) \
370{ \
371 ss[0] ^= ls_box(ss[7],3) ^ rcon_tab[i]; \
372 k[8*(i)+ 8] = ss[0]; \
373 ss[1] ^= ss[0]; \
374 k[8*(i)+ 9] = ss[1]; \
375 ss[2] ^= ss[1]; \
376 k[8*(i)+10] = ss[2]; \
377 ss[3] ^= ss[2]; \
378 k[8*(i)+11] = ss[3]; \
379}
380
381static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
382 unsigned int key_len)
383{
384 int i;
385 u32 ss[8];
386 struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
387 const __le32 *key = (const __le32 *)in_key;
388 u32 *flags = &tfm->crt_flags;
389
390 /* encryption schedule */
391
392 ctx->ekey[0] = ss[0] = le32_to_cpu(key[0]);
393 ctx->ekey[1] = ss[1] = le32_to_cpu(key[1]);
394 ctx->ekey[2] = ss[2] = le32_to_cpu(key[2]);
395 ctx->ekey[3] = ss[3] = le32_to_cpu(key[3]);
396
397 switch(key_len) {
398 case 16:
399 for (i = 0; i < 9; i++)
400 ke4(ctx->ekey, i);
401 kel4(ctx->ekey, 9);
402 ctx->rounds = 10;
403 break;
404
405 case 24:
406 ctx->ekey[4] = ss[4] = le32_to_cpu(key[4]);
407 ctx->ekey[5] = ss[5] = le32_to_cpu(key[5]);
408 for (i = 0; i < 7; i++)
409 ke6(ctx->ekey, i);
410 kel6(ctx->ekey, 7);
411 ctx->rounds = 12;
412 break;
413
414 case 32:
415 ctx->ekey[4] = ss[4] = le32_to_cpu(key[4]);
416 ctx->ekey[5] = ss[5] = le32_to_cpu(key[5]);
417 ctx->ekey[6] = ss[6] = le32_to_cpu(key[6]);
418 ctx->ekey[7] = ss[7] = le32_to_cpu(key[7]);
419 for (i = 0; i < 6; i++)
420 ke8(ctx->ekey, i);
421 kel8(ctx->ekey, 6);
422 ctx->rounds = 14;
423 break;
424
425 default:
426 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
427 return -EINVAL;
428 }
429
430 /* decryption schedule */
431
432 ctx->dkey[0] = ss[0] = le32_to_cpu(key[0]);
433 ctx->dkey[1] = ss[1] = le32_to_cpu(key[1]);
434 ctx->dkey[2] = ss[2] = le32_to_cpu(key[2]);
435 ctx->dkey[3] = ss[3] = le32_to_cpu(key[3]);
436
437 switch (key_len) {
438 case 16:
439 kdf4(ctx->dkey, 0);
440 for (i = 1; i < 9; i++)
441 kd4(ctx->dkey, i);
442 kdl4(ctx->dkey, 9);
443 break;
444
445 case 24:
446 ctx->dkey[4] = ff(ss[4] = le32_to_cpu(key[4]));
447 ctx->dkey[5] = ff(ss[5] = le32_to_cpu(key[5]));
448 kdf6(ctx->dkey, 0);
449 for (i = 1; i < 7; i++)
450 kd6(ctx->dkey, i);
451 kdl6(ctx->dkey, 7);
452 break;
453
454 case 32:
455 ctx->dkey[4] = ff(ss[4] = le32_to_cpu(key[4]));
456 ctx->dkey[5] = ff(ss[5] = le32_to_cpu(key[5]));
457 ctx->dkey[6] = ff(ss[6] = le32_to_cpu(key[6]));
458 ctx->dkey[7] = ff(ss[7] = le32_to_cpu(key[7]));
459 kdf8(ctx->dkey, 0);
460 for (i = 1; i < 6; i++)
461 kd8(ctx->dkey, i);
462 kdl8(ctx->dkey, 6);
463 break;
464 }
465 return 0;
466}
467
468static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
469{
470 aes_enc_blk(tfm, dst, src);
471}
472
473static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
474{
475 aes_dec_blk(tfm, dst, src);
476}
477
478static struct crypto_alg aes_alg = {
479 .cra_name = "aes",
480 .cra_driver_name = "aes-i586",
481 .cra_priority = 200,
482 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
483 .cra_blocksize = AES_BLOCK_SIZE,
484 .cra_ctxsize = sizeof(struct aes_ctx),
485 .cra_module = THIS_MODULE,
486 .cra_list = LIST_HEAD_INIT(aes_alg.cra_list),
487 .cra_u = {
488 .cipher = {
489 .cia_min_keysize = AES_MIN_KEY_SIZE,
490 .cia_max_keysize = AES_MAX_KEY_SIZE,
491 .cia_setkey = aes_set_key,
492 .cia_encrypt = aes_encrypt,
493 .cia_decrypt = aes_decrypt
494 }
495 }
496};
497
498static int __init aes_init(void)
499{
500 gen_tabs();
501 return crypto_register_alg(&aes_alg);
502}
503
504static void __exit aes_fini(void)
505{
506 crypto_unregister_alg(&aes_alg);
507}
508
509module_init(aes_init);
510module_exit(aes_fini);
511
512MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, i586 asm optimized");
513MODULE_LICENSE("Dual BSD/GPL");
514MODULE_AUTHOR("Fruhwirth Clemens, James Morris, Brian Gladman, Adam Richter");
515MODULE_ALIAS("aes");
diff --git a/arch/x86/crypto/aes_64.c b/arch/x86/crypto/aes_64.c
deleted file mode 100644
index 5cdb13ea5cc2..000000000000
--- a/arch/x86/crypto/aes_64.c
+++ /dev/null
@@ -1,336 +0,0 @@
1/*
2 * Cryptographic API.
3 *
4 * AES Cipher Algorithm.
5 *
6 * Based on Brian Gladman's code.
7 *
8 * Linux developers:
9 * Alexander Kjeldaas <astor@fast.no>
10 * Herbert Valerio Riedel <hvr@hvrlab.org>
11 * Kyle McMartin <kyle@debian.org>
12 * Adam J. Richter <adam@yggdrasil.com> (conversion to 2.5 API).
13 * Andreas Steinmetz <ast@domdv.de> (adapted to x86_64 assembler)
14 *
15 * This program is free software; you can redistribute it and/or modify
16 * it under the terms of the GNU General Public License as published by
17 * the Free Software Foundation; either version 2 of the License, or
18 * (at your option) any later version.
19 *
20 * ---------------------------------------------------------------------------
21 * Copyright (c) 2002, Dr Brian Gladman <brg@gladman.me.uk>, Worcester, UK.
22 * All rights reserved.
23 *
24 * LICENSE TERMS
25 *
26 * The free distribution and use of this software in both source and binary
27 * form is allowed (with or without changes) provided that:
28 *
29 * 1. distributions of this source code include the above copyright
30 * notice, this list of conditions and the following disclaimer;
31 *
32 * 2. distributions in binary form include the above copyright
33 * notice, this list of conditions and the following disclaimer
34 * in the documentation and/or other associated materials;
35 *
36 * 3. the copyright holder's name is not used to endorse products
37 * built using this software without specific written permission.
38 *
39 * ALTERNATIVELY, provided that this notice is retained in full, this product
40 * may be distributed under the terms of the GNU General Public License (GPL),
41 * in which case the provisions of the GPL apply INSTEAD OF those given above.
42 *
43 * DISCLAIMER
44 *
45 * This software is provided 'as is' with no explicit or implied warranties
46 * in respect of its properties, including, but not limited to, correctness
47 * and/or fitness for purpose.
48 * ---------------------------------------------------------------------------
49 */
50
51/* Some changes from the Gladman version:
52 s/RIJNDAEL(e_key)/E_KEY/g
53 s/RIJNDAEL(d_key)/D_KEY/g
54*/
55
56#include <asm/byteorder.h>
57#include <linux/bitops.h>
58#include <linux/crypto.h>
59#include <linux/errno.h>
60#include <linux/init.h>
61#include <linux/module.h>
62#include <linux/types.h>
63
64#define AES_MIN_KEY_SIZE 16
65#define AES_MAX_KEY_SIZE 32
66
67#define AES_BLOCK_SIZE 16
68
69/*
70 * #define byte(x, nr) ((unsigned char)((x) >> (nr*8)))
71 */
72static inline u8 byte(const u32 x, const unsigned n)
73{
74 return x >> (n << 3);
75}
76
77struct aes_ctx
78{
79 u32 key_length;
80 u32 buf[120];
81};
82
83#define E_KEY (&ctx->buf[0])
84#define D_KEY (&ctx->buf[60])
85
86static u8 pow_tab[256] __initdata;
87static u8 log_tab[256] __initdata;
88static u8 sbx_tab[256] __initdata;
89static u8 isb_tab[256] __initdata;
90static u32 rco_tab[10];
91u32 aes_ft_tab[4][256];
92u32 aes_it_tab[4][256];
93
94u32 aes_fl_tab[4][256];
95u32 aes_il_tab[4][256];
96
97static inline u8 f_mult(u8 a, u8 b)
98{
99 u8 aa = log_tab[a], cc = aa + log_tab[b];
100
101 return pow_tab[cc + (cc < aa ? 1 : 0)];
102}
103
104#define ff_mult(a, b) (a && b ? f_mult(a, b) : 0)
105
106#define ls_box(x) \
107 (aes_fl_tab[0][byte(x, 0)] ^ \
108 aes_fl_tab[1][byte(x, 1)] ^ \
109 aes_fl_tab[2][byte(x, 2)] ^ \
110 aes_fl_tab[3][byte(x, 3)])
111
112static void __init gen_tabs(void)
113{
114 u32 i, t;
115 u8 p, q;
116
117 /* log and power tables for GF(2**8) finite field with
118 0x011b as modular polynomial - the simplest primitive
119 root is 0x03, used here to generate the tables */
120
121 for (i = 0, p = 1; i < 256; ++i) {
122 pow_tab[i] = (u8)p;
123 log_tab[p] = (u8)i;
124
125 p ^= (p << 1) ^ (p & 0x80 ? 0x01b : 0);
126 }
127
128 log_tab[1] = 0;
129
130 for (i = 0, p = 1; i < 10; ++i) {
131 rco_tab[i] = p;
132
133 p = (p << 1) ^ (p & 0x80 ? 0x01b : 0);
134 }
135
136 for (i = 0; i < 256; ++i) {
137 p = (i ? pow_tab[255 - log_tab[i]] : 0);
138 q = ((p >> 7) | (p << 1)) ^ ((p >> 6) | (p << 2));
139 p ^= 0x63 ^ q ^ ((q >> 6) | (q << 2));
140 sbx_tab[i] = p;
141 isb_tab[p] = (u8)i;
142 }
143
144 for (i = 0; i < 256; ++i) {
145 p = sbx_tab[i];
146
147 t = p;
148 aes_fl_tab[0][i] = t;
149 aes_fl_tab[1][i] = rol32(t, 8);
150 aes_fl_tab[2][i] = rol32(t, 16);
151 aes_fl_tab[3][i] = rol32(t, 24);
152
153 t = ((u32)ff_mult(2, p)) |
154 ((u32)p << 8) |
155 ((u32)p << 16) | ((u32)ff_mult(3, p) << 24);
156
157 aes_ft_tab[0][i] = t;
158 aes_ft_tab[1][i] = rol32(t, 8);
159 aes_ft_tab[2][i] = rol32(t, 16);
160 aes_ft_tab[3][i] = rol32(t, 24);
161
162 p = isb_tab[i];
163
164 t = p;
165 aes_il_tab[0][i] = t;
166 aes_il_tab[1][i] = rol32(t, 8);
167 aes_il_tab[2][i] = rol32(t, 16);
168 aes_il_tab[3][i] = rol32(t, 24);
169
170 t = ((u32)ff_mult(14, p)) |
171 ((u32)ff_mult(9, p) << 8) |
172 ((u32)ff_mult(13, p) << 16) |
173 ((u32)ff_mult(11, p) << 24);
174
175 aes_it_tab[0][i] = t;
176 aes_it_tab[1][i] = rol32(t, 8);
177 aes_it_tab[2][i] = rol32(t, 16);
178 aes_it_tab[3][i] = rol32(t, 24);
179 }
180}
181
182#define star_x(x) (((x) & 0x7f7f7f7f) << 1) ^ ((((x) & 0x80808080) >> 7) * 0x1b)
183
184#define imix_col(y, x) \
185 u = star_x(x); \
186 v = star_x(u); \
187 w = star_x(v); \
188 t = w ^ (x); \
189 (y) = u ^ v ^ w; \
190 (y) ^= ror32(u ^ t, 8) ^ \
191 ror32(v ^ t, 16) ^ \
192 ror32(t, 24)
193
194/* initialise the key schedule from the user supplied key */
195
196#define loop4(i) \
197{ \
198 t = ror32(t, 8); t = ls_box(t) ^ rco_tab[i]; \
199 t ^= E_KEY[4 * i]; E_KEY[4 * i + 4] = t; \
200 t ^= E_KEY[4 * i + 1]; E_KEY[4 * i + 5] = t; \
201 t ^= E_KEY[4 * i + 2]; E_KEY[4 * i + 6] = t; \
202 t ^= E_KEY[4 * i + 3]; E_KEY[4 * i + 7] = t; \
203}
204
205#define loop6(i) \
206{ \
207 t = ror32(t, 8); t = ls_box(t) ^ rco_tab[i]; \
208 t ^= E_KEY[6 * i]; E_KEY[6 * i + 6] = t; \
209 t ^= E_KEY[6 * i + 1]; E_KEY[6 * i + 7] = t; \
210 t ^= E_KEY[6 * i + 2]; E_KEY[6 * i + 8] = t; \
211 t ^= E_KEY[6 * i + 3]; E_KEY[6 * i + 9] = t; \
212 t ^= E_KEY[6 * i + 4]; E_KEY[6 * i + 10] = t; \
213 t ^= E_KEY[6 * i + 5]; E_KEY[6 * i + 11] = t; \
214}
215
216#define loop8(i) \
217{ \
218 t = ror32(t, 8); ; t = ls_box(t) ^ rco_tab[i]; \
219 t ^= E_KEY[8 * i]; E_KEY[8 * i + 8] = t; \
220 t ^= E_KEY[8 * i + 1]; E_KEY[8 * i + 9] = t; \
221 t ^= E_KEY[8 * i + 2]; E_KEY[8 * i + 10] = t; \
222 t ^= E_KEY[8 * i + 3]; E_KEY[8 * i + 11] = t; \
223 t = E_KEY[8 * i + 4] ^ ls_box(t); \
224 E_KEY[8 * i + 12] = t; \
225 t ^= E_KEY[8 * i + 5]; E_KEY[8 * i + 13] = t; \
226 t ^= E_KEY[8 * i + 6]; E_KEY[8 * i + 14] = t; \
227 t ^= E_KEY[8 * i + 7]; E_KEY[8 * i + 15] = t; \
228}
229
230static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
231 unsigned int key_len)
232{
233 struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
234 const __le32 *key = (const __le32 *)in_key;
235 u32 *flags = &tfm->crt_flags;
236 u32 i, j, t, u, v, w;
237
238 if (key_len % 8) {
239 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
240 return -EINVAL;
241 }
242
243 ctx->key_length = key_len;
244
245 D_KEY[key_len + 24] = E_KEY[0] = le32_to_cpu(key[0]);
246 D_KEY[key_len + 25] = E_KEY[1] = le32_to_cpu(key[1]);
247 D_KEY[key_len + 26] = E_KEY[2] = le32_to_cpu(key[2]);
248 D_KEY[key_len + 27] = E_KEY[3] = le32_to_cpu(key[3]);
249
250 switch (key_len) {
251 case 16:
252 t = E_KEY[3];
253 for (i = 0; i < 10; ++i)
254 loop4(i);
255 break;
256
257 case 24:
258 E_KEY[4] = le32_to_cpu(key[4]);
259 t = E_KEY[5] = le32_to_cpu(key[5]);
260 for (i = 0; i < 8; ++i)
261 loop6 (i);
262 break;
263
264 case 32:
265 E_KEY[4] = le32_to_cpu(key[4]);
266 E_KEY[5] = le32_to_cpu(key[5]);
267 E_KEY[6] = le32_to_cpu(key[6]);
268 t = E_KEY[7] = le32_to_cpu(key[7]);
269 for (i = 0; i < 7; ++i)
270 loop8(i);
271 break;
272 }
273
274 D_KEY[0] = E_KEY[key_len + 24];
275 D_KEY[1] = E_KEY[key_len + 25];
276 D_KEY[2] = E_KEY[key_len + 26];
277 D_KEY[3] = E_KEY[key_len + 27];
278
279 for (i = 4; i < key_len + 24; ++i) {
280 j = key_len + 24 - (i & ~3) + (i & 3);
281 imix_col(D_KEY[j], E_KEY[i]);
282 }
283
284 return 0;
285}
286
287asmlinkage void aes_enc_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in);
288asmlinkage void aes_dec_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in);
289
290static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
291{
292 aes_enc_blk(tfm, dst, src);
293}
294
295static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
296{
297 aes_dec_blk(tfm, dst, src);
298}
299
300static struct crypto_alg aes_alg = {
301 .cra_name = "aes",
302 .cra_driver_name = "aes-x86_64",
303 .cra_priority = 200,
304 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
305 .cra_blocksize = AES_BLOCK_SIZE,
306 .cra_ctxsize = sizeof(struct aes_ctx),
307 .cra_module = THIS_MODULE,
308 .cra_list = LIST_HEAD_INIT(aes_alg.cra_list),
309 .cra_u = {
310 .cipher = {
311 .cia_min_keysize = AES_MIN_KEY_SIZE,
312 .cia_max_keysize = AES_MAX_KEY_SIZE,
313 .cia_setkey = aes_set_key,
314 .cia_encrypt = aes_encrypt,
315 .cia_decrypt = aes_decrypt
316 }
317 }
318};
319
320static int __init aes_init(void)
321{
322 gen_tabs();
323 return crypto_register_alg(&aes_alg);
324}
325
326static void __exit aes_fini(void)
327{
328 crypto_unregister_alg(&aes_alg);
329}
330
331module_init(aes_init);
332module_exit(aes_fini);
333
334MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
335MODULE_LICENSE("GPL");
336MODULE_ALIAS("aes");
diff --git a/arch/x86/crypto/aes_glue.c b/arch/x86/crypto/aes_glue.c
new file mode 100644
index 000000000000..71f457827116
--- /dev/null
+++ b/arch/x86/crypto/aes_glue.c
@@ -0,0 +1,57 @@
1/*
2 * Glue Code for the asm optimized version of the AES Cipher Algorithm
3 *
4 */
5
6#include <crypto/aes.h>
7
8asmlinkage void aes_enc_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in);
9asmlinkage void aes_dec_blk(struct crypto_tfm *tfm, u8 *out, const u8 *in);
10
11static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
12{
13 aes_enc_blk(tfm, dst, src);
14}
15
16static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
17{
18 aes_dec_blk(tfm, dst, src);
19}
20
21static struct crypto_alg aes_alg = {
22 .cra_name = "aes",
23 .cra_driver_name = "aes-asm",
24 .cra_priority = 200,
25 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
26 .cra_blocksize = AES_BLOCK_SIZE,
27 .cra_ctxsize = sizeof(struct crypto_aes_ctx),
28 .cra_module = THIS_MODULE,
29 .cra_list = LIST_HEAD_INIT(aes_alg.cra_list),
30 .cra_u = {
31 .cipher = {
32 .cia_min_keysize = AES_MIN_KEY_SIZE,
33 .cia_max_keysize = AES_MAX_KEY_SIZE,
34 .cia_setkey = crypto_aes_set_key,
35 .cia_encrypt = aes_encrypt,
36 .cia_decrypt = aes_decrypt
37 }
38 }
39};
40
41static int __init aes_init(void)
42{
43 return crypto_register_alg(&aes_alg);
44}
45
46static void __exit aes_fini(void)
47{
48 crypto_unregister_alg(&aes_alg);
49}
50
51module_init(aes_init);
52module_exit(aes_fini);
53
54MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, asm optimized");
55MODULE_LICENSE("GPL");
56MODULE_ALIAS("aes");
57MODULE_ALIAS("aes-asm");
diff --git a/arch/x86/crypto/salsa20-i586-asm_32.S b/arch/x86/crypto/salsa20-i586-asm_32.S
new file mode 100644
index 000000000000..72eb306680b2
--- /dev/null
+++ b/arch/x86/crypto/salsa20-i586-asm_32.S
@@ -0,0 +1,1114 @@
1# salsa20_pm.s version 20051229
2# D. J. Bernstein
3# Public domain.
4
5# enter ECRYPT_encrypt_bytes
6.text
7.p2align 5
8.globl ECRYPT_encrypt_bytes
9ECRYPT_encrypt_bytes:
10 mov %esp,%eax
11 and $31,%eax
12 add $256,%eax
13 sub %eax,%esp
14 # eax_stack = eax
15 movl %eax,80(%esp)
16 # ebx_stack = ebx
17 movl %ebx,84(%esp)
18 # esi_stack = esi
19 movl %esi,88(%esp)
20 # edi_stack = edi
21 movl %edi,92(%esp)
22 # ebp_stack = ebp
23 movl %ebp,96(%esp)
24 # x = arg1
25 movl 4(%esp,%eax),%edx
26 # m = arg2
27 movl 8(%esp,%eax),%esi
28 # out = arg3
29 movl 12(%esp,%eax),%edi
30 # bytes = arg4
31 movl 16(%esp,%eax),%ebx
32 # bytes -= 0
33 sub $0,%ebx
34 # goto done if unsigned<=
35 jbe ._done
36._start:
37 # in0 = *(uint32 *) (x + 0)
38 movl 0(%edx),%eax
39 # in1 = *(uint32 *) (x + 4)
40 movl 4(%edx),%ecx
41 # in2 = *(uint32 *) (x + 8)
42 movl 8(%edx),%ebp
43 # j0 = in0
44 movl %eax,164(%esp)
45 # in3 = *(uint32 *) (x + 12)
46 movl 12(%edx),%eax
47 # j1 = in1
48 movl %ecx,168(%esp)
49 # in4 = *(uint32 *) (x + 16)
50 movl 16(%edx),%ecx
51 # j2 = in2
52 movl %ebp,172(%esp)
53 # in5 = *(uint32 *) (x + 20)
54 movl 20(%edx),%ebp
55 # j3 = in3
56 movl %eax,176(%esp)
57 # in6 = *(uint32 *) (x + 24)
58 movl 24(%edx),%eax
59 # j4 = in4
60 movl %ecx,180(%esp)
61 # in7 = *(uint32 *) (x + 28)
62 movl 28(%edx),%ecx
63 # j5 = in5
64 movl %ebp,184(%esp)
65 # in8 = *(uint32 *) (x + 32)
66 movl 32(%edx),%ebp
67 # j6 = in6
68 movl %eax,188(%esp)
69 # in9 = *(uint32 *) (x + 36)
70 movl 36(%edx),%eax
71 # j7 = in7
72 movl %ecx,192(%esp)
73 # in10 = *(uint32 *) (x + 40)
74 movl 40(%edx),%ecx
75 # j8 = in8
76 movl %ebp,196(%esp)
77 # in11 = *(uint32 *) (x + 44)
78 movl 44(%edx),%ebp
79 # j9 = in9
80 movl %eax,200(%esp)
81 # in12 = *(uint32 *) (x + 48)
82 movl 48(%edx),%eax
83 # j10 = in10
84 movl %ecx,204(%esp)
85 # in13 = *(uint32 *) (x + 52)
86 movl 52(%edx),%ecx
87 # j11 = in11
88 movl %ebp,208(%esp)
89 # in14 = *(uint32 *) (x + 56)
90 movl 56(%edx),%ebp
91 # j12 = in12
92 movl %eax,212(%esp)
93 # in15 = *(uint32 *) (x + 60)
94 movl 60(%edx),%eax
95 # j13 = in13
96 movl %ecx,216(%esp)
97 # j14 = in14
98 movl %ebp,220(%esp)
99 # j15 = in15
100 movl %eax,224(%esp)
101 # x_backup = x
102 movl %edx,64(%esp)
103._bytesatleast1:
104 # bytes - 64
105 cmp $64,%ebx
106 # goto nocopy if unsigned>=
107 jae ._nocopy
108 # ctarget = out
109 movl %edi,228(%esp)
110 # out = &tmp
111 leal 0(%esp),%edi
112 # i = bytes
113 mov %ebx,%ecx
114 # while (i) { *out++ = *m++; --i }
115 rep movsb
116 # out = &tmp
117 leal 0(%esp),%edi
118 # m = &tmp
119 leal 0(%esp),%esi
120._nocopy:
121 # out_backup = out
122 movl %edi,72(%esp)
123 # m_backup = m
124 movl %esi,68(%esp)
125 # bytes_backup = bytes
126 movl %ebx,76(%esp)
127 # in0 = j0
128 movl 164(%esp),%eax
129 # in1 = j1
130 movl 168(%esp),%ecx
131 # in2 = j2
132 movl 172(%esp),%edx
133 # in3 = j3
134 movl 176(%esp),%ebx
135 # x0 = in0
136 movl %eax,100(%esp)
137 # x1 = in1
138 movl %ecx,104(%esp)
139 # x2 = in2
140 movl %edx,108(%esp)
141 # x3 = in3
142 movl %ebx,112(%esp)
143 # in4 = j4
144 movl 180(%esp),%eax
145 # in5 = j5
146 movl 184(%esp),%ecx
147 # in6 = j6
148 movl 188(%esp),%edx
149 # in7 = j7
150 movl 192(%esp),%ebx
151 # x4 = in4
152 movl %eax,116(%esp)
153 # x5 = in5
154 movl %ecx,120(%esp)
155 # x6 = in6
156 movl %edx,124(%esp)
157 # x7 = in7
158 movl %ebx,128(%esp)
159 # in8 = j8
160 movl 196(%esp),%eax
161 # in9 = j9
162 movl 200(%esp),%ecx
163 # in10 = j10
164 movl 204(%esp),%edx
165 # in11 = j11
166 movl 208(%esp),%ebx
167 # x8 = in8
168 movl %eax,132(%esp)
169 # x9 = in9
170 movl %ecx,136(%esp)
171 # x10 = in10
172 movl %edx,140(%esp)
173 # x11 = in11
174 movl %ebx,144(%esp)
175 # in12 = j12
176 movl 212(%esp),%eax
177 # in13 = j13
178 movl 216(%esp),%ecx
179 # in14 = j14
180 movl 220(%esp),%edx
181 # in15 = j15
182 movl 224(%esp),%ebx
183 # x12 = in12
184 movl %eax,148(%esp)
185 # x13 = in13
186 movl %ecx,152(%esp)
187 # x14 = in14
188 movl %edx,156(%esp)
189 # x15 = in15
190 movl %ebx,160(%esp)
191 # i = 20
192 mov $20,%ebp
193 # p = x0
194 movl 100(%esp),%eax
195 # s = x5
196 movl 120(%esp),%ecx
197 # t = x10
198 movl 140(%esp),%edx
199 # w = x15
200 movl 160(%esp),%ebx
201._mainloop:
202 # x0 = p
203 movl %eax,100(%esp)
204 # x10 = t
205 movl %edx,140(%esp)
206 # p += x12
207 addl 148(%esp),%eax
208 # x5 = s
209 movl %ecx,120(%esp)
210 # t += x6
211 addl 124(%esp),%edx
212 # x15 = w
213 movl %ebx,160(%esp)
214 # r = x1
215 movl 104(%esp),%esi
216 # r += s
217 add %ecx,%esi
218 # v = x11
219 movl 144(%esp),%edi
220 # v += w
221 add %ebx,%edi
222 # p <<<= 7
223 rol $7,%eax
224 # p ^= x4
225 xorl 116(%esp),%eax
226 # t <<<= 7
227 rol $7,%edx
228 # t ^= x14
229 xorl 156(%esp),%edx
230 # r <<<= 7
231 rol $7,%esi
232 # r ^= x9
233 xorl 136(%esp),%esi
234 # v <<<= 7
235 rol $7,%edi
236 # v ^= x3
237 xorl 112(%esp),%edi
238 # x4 = p
239 movl %eax,116(%esp)
240 # x14 = t
241 movl %edx,156(%esp)
242 # p += x0
243 addl 100(%esp),%eax
244 # x9 = r
245 movl %esi,136(%esp)
246 # t += x10
247 addl 140(%esp),%edx
248 # x3 = v
249 movl %edi,112(%esp)
250 # p <<<= 9
251 rol $9,%eax
252 # p ^= x8
253 xorl 132(%esp),%eax
254 # t <<<= 9
255 rol $9,%edx
256 # t ^= x2
257 xorl 108(%esp),%edx
258 # s += r
259 add %esi,%ecx
260 # s <<<= 9
261 rol $9,%ecx
262 # s ^= x13
263 xorl 152(%esp),%ecx
264 # w += v
265 add %edi,%ebx
266 # w <<<= 9
267 rol $9,%ebx
268 # w ^= x7
269 xorl 128(%esp),%ebx
270 # x8 = p
271 movl %eax,132(%esp)
272 # x2 = t
273 movl %edx,108(%esp)
274 # p += x4
275 addl 116(%esp),%eax
276 # x13 = s
277 movl %ecx,152(%esp)
278 # t += x14
279 addl 156(%esp),%edx
280 # x7 = w
281 movl %ebx,128(%esp)
282 # p <<<= 13
283 rol $13,%eax
284 # p ^= x12
285 xorl 148(%esp),%eax
286 # t <<<= 13
287 rol $13,%edx
288 # t ^= x6
289 xorl 124(%esp),%edx
290 # r += s
291 add %ecx,%esi
292 # r <<<= 13
293 rol $13,%esi
294 # r ^= x1
295 xorl 104(%esp),%esi
296 # v += w
297 add %ebx,%edi
298 # v <<<= 13
299 rol $13,%edi
300 # v ^= x11
301 xorl 144(%esp),%edi
302 # x12 = p
303 movl %eax,148(%esp)
304 # x6 = t
305 movl %edx,124(%esp)
306 # p += x8
307 addl 132(%esp),%eax
308 # x1 = r
309 movl %esi,104(%esp)
310 # t += x2
311 addl 108(%esp),%edx
312 # x11 = v
313 movl %edi,144(%esp)
314 # p <<<= 18
315 rol $18,%eax
316 # p ^= x0
317 xorl 100(%esp),%eax
318 # t <<<= 18
319 rol $18,%edx
320 # t ^= x10
321 xorl 140(%esp),%edx
322 # s += r
323 add %esi,%ecx
324 # s <<<= 18
325 rol $18,%ecx
326 # s ^= x5
327 xorl 120(%esp),%ecx
328 # w += v
329 add %edi,%ebx
330 # w <<<= 18
331 rol $18,%ebx
332 # w ^= x15
333 xorl 160(%esp),%ebx
334 # x0 = p
335 movl %eax,100(%esp)
336 # x10 = t
337 movl %edx,140(%esp)
338 # p += x3
339 addl 112(%esp),%eax
340 # p <<<= 7
341 rol $7,%eax
342 # x5 = s
343 movl %ecx,120(%esp)
344 # t += x9
345 addl 136(%esp),%edx
346 # x15 = w
347 movl %ebx,160(%esp)
348 # r = x4
349 movl 116(%esp),%esi
350 # r += s
351 add %ecx,%esi
352 # v = x14
353 movl 156(%esp),%edi
354 # v += w
355 add %ebx,%edi
356 # p ^= x1
357 xorl 104(%esp),%eax
358 # t <<<= 7
359 rol $7,%edx
360 # t ^= x11
361 xorl 144(%esp),%edx
362 # r <<<= 7
363 rol $7,%esi
364 # r ^= x6
365 xorl 124(%esp),%esi
366 # v <<<= 7
367 rol $7,%edi
368 # v ^= x12
369 xorl 148(%esp),%edi
370 # x1 = p
371 movl %eax,104(%esp)
372 # x11 = t
373 movl %edx,144(%esp)
374 # p += x0
375 addl 100(%esp),%eax
376 # x6 = r
377 movl %esi,124(%esp)
378 # t += x10
379 addl 140(%esp),%edx
380 # x12 = v
381 movl %edi,148(%esp)
382 # p <<<= 9
383 rol $9,%eax
384 # p ^= x2
385 xorl 108(%esp),%eax
386 # t <<<= 9
387 rol $9,%edx
388 # t ^= x8
389 xorl 132(%esp),%edx
390 # s += r
391 add %esi,%ecx
392 # s <<<= 9
393 rol $9,%ecx
394 # s ^= x7
395 xorl 128(%esp),%ecx
396 # w += v
397 add %edi,%ebx
398 # w <<<= 9
399 rol $9,%ebx
400 # w ^= x13
401 xorl 152(%esp),%ebx
402 # x2 = p
403 movl %eax,108(%esp)
404 # x8 = t
405 movl %edx,132(%esp)
406 # p += x1
407 addl 104(%esp),%eax
408 # x7 = s
409 movl %ecx,128(%esp)
410 # t += x11
411 addl 144(%esp),%edx
412 # x13 = w
413 movl %ebx,152(%esp)
414 # p <<<= 13
415 rol $13,%eax
416 # p ^= x3
417 xorl 112(%esp),%eax
418 # t <<<= 13
419 rol $13,%edx
420 # t ^= x9
421 xorl 136(%esp),%edx
422 # r += s
423 add %ecx,%esi
424 # r <<<= 13
425 rol $13,%esi
426 # r ^= x4
427 xorl 116(%esp),%esi
428 # v += w
429 add %ebx,%edi
430 # v <<<= 13
431 rol $13,%edi
432 # v ^= x14
433 xorl 156(%esp),%edi
434 # x3 = p
435 movl %eax,112(%esp)
436 # x9 = t
437 movl %edx,136(%esp)
438 # p += x2
439 addl 108(%esp),%eax
440 # x4 = r
441 movl %esi,116(%esp)
442 # t += x8
443 addl 132(%esp),%edx
444 # x14 = v
445 movl %edi,156(%esp)
446 # p <<<= 18
447 rol $18,%eax
448 # p ^= x0
449 xorl 100(%esp),%eax
450 # t <<<= 18
451 rol $18,%edx
452 # t ^= x10
453 xorl 140(%esp),%edx
454 # s += r
455 add %esi,%ecx
456 # s <<<= 18
457 rol $18,%ecx
458 # s ^= x5
459 xorl 120(%esp),%ecx
460 # w += v
461 add %edi,%ebx
462 # w <<<= 18
463 rol $18,%ebx
464 # w ^= x15
465 xorl 160(%esp),%ebx
466 # x0 = p
467 movl %eax,100(%esp)
468 # x10 = t
469 movl %edx,140(%esp)
470 # p += x12
471 addl 148(%esp),%eax
472 # x5 = s
473 movl %ecx,120(%esp)
474 # t += x6
475 addl 124(%esp),%edx
476 # x15 = w
477 movl %ebx,160(%esp)
478 # r = x1
479 movl 104(%esp),%esi
480 # r += s
481 add %ecx,%esi
482 # v = x11
483 movl 144(%esp),%edi
484 # v += w
485 add %ebx,%edi
486 # p <<<= 7
487 rol $7,%eax
488 # p ^= x4
489 xorl 116(%esp),%eax
490 # t <<<= 7
491 rol $7,%edx
492 # t ^= x14
493 xorl 156(%esp),%edx
494 # r <<<= 7
495 rol $7,%esi
496 # r ^= x9
497 xorl 136(%esp),%esi
498 # v <<<= 7
499 rol $7,%edi
500 # v ^= x3
501 xorl 112(%esp),%edi
502 # x4 = p
503 movl %eax,116(%esp)
504 # x14 = t
505 movl %edx,156(%esp)
506 # p += x0
507 addl 100(%esp),%eax
508 # x9 = r
509 movl %esi,136(%esp)
510 # t += x10
511 addl 140(%esp),%edx
512 # x3 = v
513 movl %edi,112(%esp)
514 # p <<<= 9
515 rol $9,%eax
516 # p ^= x8
517 xorl 132(%esp),%eax
518 # t <<<= 9
519 rol $9,%edx
520 # t ^= x2
521 xorl 108(%esp),%edx
522 # s += r
523 add %esi,%ecx
524 # s <<<= 9
525 rol $9,%ecx
526 # s ^= x13
527 xorl 152(%esp),%ecx
528 # w += v
529 add %edi,%ebx
530 # w <<<= 9
531 rol $9,%ebx
532 # w ^= x7
533 xorl 128(%esp),%ebx
534 # x8 = p
535 movl %eax,132(%esp)
536 # x2 = t
537 movl %edx,108(%esp)
538 # p += x4
539 addl 116(%esp),%eax
540 # x13 = s
541 movl %ecx,152(%esp)
542 # t += x14
543 addl 156(%esp),%edx
544 # x7 = w
545 movl %ebx,128(%esp)
546 # p <<<= 13
547 rol $13,%eax
548 # p ^= x12
549 xorl 148(%esp),%eax
550 # t <<<= 13
551 rol $13,%edx
552 # t ^= x6
553 xorl 124(%esp),%edx
554 # r += s
555 add %ecx,%esi
556 # r <<<= 13
557 rol $13,%esi
558 # r ^= x1
559 xorl 104(%esp),%esi
560 # v += w
561 add %ebx,%edi
562 # v <<<= 13
563 rol $13,%edi
564 # v ^= x11
565 xorl 144(%esp),%edi
566 # x12 = p
567 movl %eax,148(%esp)
568 # x6 = t
569 movl %edx,124(%esp)
570 # p += x8
571 addl 132(%esp),%eax
572 # x1 = r
573 movl %esi,104(%esp)
574 # t += x2
575 addl 108(%esp),%edx
576 # x11 = v
577 movl %edi,144(%esp)
578 # p <<<= 18
579 rol $18,%eax
580 # p ^= x0
581 xorl 100(%esp),%eax
582 # t <<<= 18
583 rol $18,%edx
584 # t ^= x10
585 xorl 140(%esp),%edx
586 # s += r
587 add %esi,%ecx
588 # s <<<= 18
589 rol $18,%ecx
590 # s ^= x5
591 xorl 120(%esp),%ecx
592 # w += v
593 add %edi,%ebx
594 # w <<<= 18
595 rol $18,%ebx
596 # w ^= x15
597 xorl 160(%esp),%ebx
598 # x0 = p
599 movl %eax,100(%esp)
600 # x10 = t
601 movl %edx,140(%esp)
602 # p += x3
603 addl 112(%esp),%eax
604 # p <<<= 7
605 rol $7,%eax
606 # x5 = s
607 movl %ecx,120(%esp)
608 # t += x9
609 addl 136(%esp),%edx
610 # x15 = w
611 movl %ebx,160(%esp)
612 # r = x4
613 movl 116(%esp),%esi
614 # r += s
615 add %ecx,%esi
616 # v = x14
617 movl 156(%esp),%edi
618 # v += w
619 add %ebx,%edi
620 # p ^= x1
621 xorl 104(%esp),%eax
622 # t <<<= 7
623 rol $7,%edx
624 # t ^= x11
625 xorl 144(%esp),%edx
626 # r <<<= 7
627 rol $7,%esi
628 # r ^= x6
629 xorl 124(%esp),%esi
630 # v <<<= 7
631 rol $7,%edi
632 # v ^= x12
633 xorl 148(%esp),%edi
634 # x1 = p
635 movl %eax,104(%esp)
636 # x11 = t
637 movl %edx,144(%esp)
638 # p += x0
639 addl 100(%esp),%eax
640 # x6 = r
641 movl %esi,124(%esp)
642 # t += x10
643 addl 140(%esp),%edx
644 # x12 = v
645 movl %edi,148(%esp)
646 # p <<<= 9
647 rol $9,%eax
648 # p ^= x2
649 xorl 108(%esp),%eax
650 # t <<<= 9
651 rol $9,%edx
652 # t ^= x8
653 xorl 132(%esp),%edx
654 # s += r
655 add %esi,%ecx
656 # s <<<= 9
657 rol $9,%ecx
658 # s ^= x7
659 xorl 128(%esp),%ecx
660 # w += v
661 add %edi,%ebx
662 # w <<<= 9
663 rol $9,%ebx
664 # w ^= x13
665 xorl 152(%esp),%ebx
666 # x2 = p
667 movl %eax,108(%esp)
668 # x8 = t
669 movl %edx,132(%esp)
670 # p += x1
671 addl 104(%esp),%eax
672 # x7 = s
673 movl %ecx,128(%esp)
674 # t += x11
675 addl 144(%esp),%edx
676 # x13 = w
677 movl %ebx,152(%esp)
678 # p <<<= 13
679 rol $13,%eax
680 # p ^= x3
681 xorl 112(%esp),%eax
682 # t <<<= 13
683 rol $13,%edx
684 # t ^= x9
685 xorl 136(%esp),%edx
686 # r += s
687 add %ecx,%esi
688 # r <<<= 13
689 rol $13,%esi
690 # r ^= x4
691 xorl 116(%esp),%esi
692 # v += w
693 add %ebx,%edi
694 # v <<<= 13
695 rol $13,%edi
696 # v ^= x14
697 xorl 156(%esp),%edi
698 # x3 = p
699 movl %eax,112(%esp)
700 # x9 = t
701 movl %edx,136(%esp)
702 # p += x2
703 addl 108(%esp),%eax
704 # x4 = r
705 movl %esi,116(%esp)
706 # t += x8
707 addl 132(%esp),%edx
708 # x14 = v
709 movl %edi,156(%esp)
710 # p <<<= 18
711 rol $18,%eax
712 # p ^= x0
713 xorl 100(%esp),%eax
714 # t <<<= 18
715 rol $18,%edx
716 # t ^= x10
717 xorl 140(%esp),%edx
718 # s += r
719 add %esi,%ecx
720 # s <<<= 18
721 rol $18,%ecx
722 # s ^= x5
723 xorl 120(%esp),%ecx
724 # w += v
725 add %edi,%ebx
726 # w <<<= 18
727 rol $18,%ebx
728 # w ^= x15
729 xorl 160(%esp),%ebx
730 # i -= 4
731 sub $4,%ebp
732 # goto mainloop if unsigned >
733 ja ._mainloop
734 # x0 = p
735 movl %eax,100(%esp)
736 # x5 = s
737 movl %ecx,120(%esp)
738 # x10 = t
739 movl %edx,140(%esp)
740 # x15 = w
741 movl %ebx,160(%esp)
742 # out = out_backup
743 movl 72(%esp),%edi
744 # m = m_backup
745 movl 68(%esp),%esi
746 # in0 = x0
747 movl 100(%esp),%eax
748 # in1 = x1
749 movl 104(%esp),%ecx
750 # in0 += j0
751 addl 164(%esp),%eax
752 # in1 += j1
753 addl 168(%esp),%ecx
754 # in0 ^= *(uint32 *) (m + 0)
755 xorl 0(%esi),%eax
756 # in1 ^= *(uint32 *) (m + 4)
757 xorl 4(%esi),%ecx
758 # *(uint32 *) (out + 0) = in0
759 movl %eax,0(%edi)
760 # *(uint32 *) (out + 4) = in1
761 movl %ecx,4(%edi)
762 # in2 = x2
763 movl 108(%esp),%eax
764 # in3 = x3
765 movl 112(%esp),%ecx
766 # in2 += j2
767 addl 172(%esp),%eax
768 # in3 += j3
769 addl 176(%esp),%ecx
770 # in2 ^= *(uint32 *) (m + 8)
771 xorl 8(%esi),%eax
772 # in3 ^= *(uint32 *) (m + 12)
773 xorl 12(%esi),%ecx
774 # *(uint32 *) (out + 8) = in2
775 movl %eax,8(%edi)
776 # *(uint32 *) (out + 12) = in3
777 movl %ecx,12(%edi)
778 # in4 = x4
779 movl 116(%esp),%eax
780 # in5 = x5
781 movl 120(%esp),%ecx
782 # in4 += j4
783 addl 180(%esp),%eax
784 # in5 += j5
785 addl 184(%esp),%ecx
786 # in4 ^= *(uint32 *) (m + 16)
787 xorl 16(%esi),%eax
788 # in5 ^= *(uint32 *) (m + 20)
789 xorl 20(%esi),%ecx
790 # *(uint32 *) (out + 16) = in4
791 movl %eax,16(%edi)
792 # *(uint32 *) (out + 20) = in5
793 movl %ecx,20(%edi)
794 # in6 = x6
795 movl 124(%esp),%eax
796 # in7 = x7
797 movl 128(%esp),%ecx
798 # in6 += j6
799 addl 188(%esp),%eax
800 # in7 += j7
801 addl 192(%esp),%ecx
802 # in6 ^= *(uint32 *) (m + 24)
803 xorl 24(%esi),%eax
804 # in7 ^= *(uint32 *) (m + 28)
805 xorl 28(%esi),%ecx
806 # *(uint32 *) (out + 24) = in6
807 movl %eax,24(%edi)
808 # *(uint32 *) (out + 28) = in7
809 movl %ecx,28(%edi)
810 # in8 = x8
811 movl 132(%esp),%eax
812 # in9 = x9
813 movl 136(%esp),%ecx
814 # in8 += j8
815 addl 196(%esp),%eax
816 # in9 += j9
817 addl 200(%esp),%ecx
818 # in8 ^= *(uint32 *) (m + 32)
819 xorl 32(%esi),%eax
820 # in9 ^= *(uint32 *) (m + 36)
821 xorl 36(%esi),%ecx
822 # *(uint32 *) (out + 32) = in8
823 movl %eax,32(%edi)
824 # *(uint32 *) (out + 36) = in9
825 movl %ecx,36(%edi)
826 # in10 = x10
827 movl 140(%esp),%eax
828 # in11 = x11
829 movl 144(%esp),%ecx
830 # in10 += j10
831 addl 204(%esp),%eax
832 # in11 += j11
833 addl 208(%esp),%ecx
834 # in10 ^= *(uint32 *) (m + 40)
835 xorl 40(%esi),%eax
836 # in11 ^= *(uint32 *) (m + 44)
837 xorl 44(%esi),%ecx
838 # *(uint32 *) (out + 40) = in10
839 movl %eax,40(%edi)
840 # *(uint32 *) (out + 44) = in11
841 movl %ecx,44(%edi)
842 # in12 = x12
843 movl 148(%esp),%eax
844 # in13 = x13
845 movl 152(%esp),%ecx
846 # in12 += j12
847 addl 212(%esp),%eax
848 # in13 += j13
849 addl 216(%esp),%ecx
850 # in12 ^= *(uint32 *) (m + 48)
851 xorl 48(%esi),%eax
852 # in13 ^= *(uint32 *) (m + 52)
853 xorl 52(%esi),%ecx
854 # *(uint32 *) (out + 48) = in12
855 movl %eax,48(%edi)
856 # *(uint32 *) (out + 52) = in13
857 movl %ecx,52(%edi)
858 # in14 = x14
859 movl 156(%esp),%eax
860 # in15 = x15
861 movl 160(%esp),%ecx
862 # in14 += j14
863 addl 220(%esp),%eax
864 # in15 += j15
865 addl 224(%esp),%ecx
866 # in14 ^= *(uint32 *) (m + 56)
867 xorl 56(%esi),%eax
868 # in15 ^= *(uint32 *) (m + 60)
869 xorl 60(%esi),%ecx
870 # *(uint32 *) (out + 56) = in14
871 movl %eax,56(%edi)
872 # *(uint32 *) (out + 60) = in15
873 movl %ecx,60(%edi)
874 # bytes = bytes_backup
875 movl 76(%esp),%ebx
876 # in8 = j8
877 movl 196(%esp),%eax
878 # in9 = j9
879 movl 200(%esp),%ecx
880 # in8 += 1
881 add $1,%eax
882 # in9 += 0 + carry
883 adc $0,%ecx
884 # j8 = in8
885 movl %eax,196(%esp)
886 # j9 = in9
887 movl %ecx,200(%esp)
888 # bytes - 64
889 cmp $64,%ebx
890 # goto bytesatleast65 if unsigned>
891 ja ._bytesatleast65
892 # goto bytesatleast64 if unsigned>=
893 jae ._bytesatleast64
894 # m = out
895 mov %edi,%esi
896 # out = ctarget
897 movl 228(%esp),%edi
898 # i = bytes
899 mov %ebx,%ecx
900 # while (i) { *out++ = *m++; --i }
901 rep movsb
902._bytesatleast64:
903 # x = x_backup
904 movl 64(%esp),%eax
905 # in8 = j8
906 movl 196(%esp),%ecx
907 # in9 = j9
908 movl 200(%esp),%edx
909 # *(uint32 *) (x + 32) = in8
910 movl %ecx,32(%eax)
911 # *(uint32 *) (x + 36) = in9
912 movl %edx,36(%eax)
913._done:
914 # eax = eax_stack
915 movl 80(%esp),%eax
916 # ebx = ebx_stack
917 movl 84(%esp),%ebx
918 # esi = esi_stack
919 movl 88(%esp),%esi
920 # edi = edi_stack
921 movl 92(%esp),%edi
922 # ebp = ebp_stack
923 movl 96(%esp),%ebp
924 # leave
925 add %eax,%esp
926 ret
927._bytesatleast65:
928 # bytes -= 64
929 sub $64,%ebx
930 # out += 64
931 add $64,%edi
932 # m += 64
933 add $64,%esi
934 # goto bytesatleast1
935 jmp ._bytesatleast1
936# enter ECRYPT_keysetup
937.text
938.p2align 5
939.globl ECRYPT_keysetup
940ECRYPT_keysetup:
941 mov %esp,%eax
942 and $31,%eax
943 add $256,%eax
944 sub %eax,%esp
945 # eax_stack = eax
946 movl %eax,64(%esp)
947 # ebx_stack = ebx
948 movl %ebx,68(%esp)
949 # esi_stack = esi
950 movl %esi,72(%esp)
951 # edi_stack = edi
952 movl %edi,76(%esp)
953 # ebp_stack = ebp
954 movl %ebp,80(%esp)
955 # k = arg2
956 movl 8(%esp,%eax),%ecx
957 # kbits = arg3
958 movl 12(%esp,%eax),%edx
959 # x = arg1
960 movl 4(%esp,%eax),%eax
961 # in1 = *(uint32 *) (k + 0)
962 movl 0(%ecx),%ebx
963 # in2 = *(uint32 *) (k + 4)
964 movl 4(%ecx),%esi
965 # in3 = *(uint32 *) (k + 8)
966 movl 8(%ecx),%edi
967 # in4 = *(uint32 *) (k + 12)
968 movl 12(%ecx),%ebp
969 # *(uint32 *) (x + 4) = in1
970 movl %ebx,4(%eax)
971 # *(uint32 *) (x + 8) = in2
972 movl %esi,8(%eax)
973 # *(uint32 *) (x + 12) = in3
974 movl %edi,12(%eax)
975 # *(uint32 *) (x + 16) = in4
976 movl %ebp,16(%eax)
977 # kbits - 256
978 cmp $256,%edx
979 # goto kbits128 if unsigned<
980 jb ._kbits128
981._kbits256:
982 # in11 = *(uint32 *) (k + 16)
983 movl 16(%ecx),%edx
984 # in12 = *(uint32 *) (k + 20)
985 movl 20(%ecx),%ebx
986 # in13 = *(uint32 *) (k + 24)
987 movl 24(%ecx),%esi
988 # in14 = *(uint32 *) (k + 28)
989 movl 28(%ecx),%ecx
990 # *(uint32 *) (x + 44) = in11
991 movl %edx,44(%eax)
992 # *(uint32 *) (x + 48) = in12
993 movl %ebx,48(%eax)
994 # *(uint32 *) (x + 52) = in13
995 movl %esi,52(%eax)
996 # *(uint32 *) (x + 56) = in14
997 movl %ecx,56(%eax)
998 # in0 = 1634760805
999 mov $1634760805,%ecx
1000 # in5 = 857760878
1001 mov $857760878,%edx
1002 # in10 = 2036477234
1003 mov $2036477234,%ebx
1004 # in15 = 1797285236
1005 mov $1797285236,%esi
1006 # *(uint32 *) (x + 0) = in0
1007 movl %ecx,0(%eax)
1008 # *(uint32 *) (x + 20) = in5
1009 movl %edx,20(%eax)
1010 # *(uint32 *) (x + 40) = in10
1011 movl %ebx,40(%eax)
1012 # *(uint32 *) (x + 60) = in15
1013 movl %esi,60(%eax)
1014 # goto keysetupdone
1015 jmp ._keysetupdone
1016._kbits128:
1017 # in11 = *(uint32 *) (k + 0)
1018 movl 0(%ecx),%edx
1019 # in12 = *(uint32 *) (k + 4)
1020 movl 4(%ecx),%ebx
1021 # in13 = *(uint32 *) (k + 8)
1022 movl 8(%ecx),%esi
1023 # in14 = *(uint32 *) (k + 12)
1024 movl 12(%ecx),%ecx
1025 # *(uint32 *) (x + 44) = in11
1026 movl %edx,44(%eax)
1027 # *(uint32 *) (x + 48) = in12
1028 movl %ebx,48(%eax)
1029 # *(uint32 *) (x + 52) = in13
1030 movl %esi,52(%eax)
1031 # *(uint32 *) (x + 56) = in14
1032 movl %ecx,56(%eax)
1033 # in0 = 1634760805
1034 mov $1634760805,%ecx
1035 # in5 = 824206446
1036 mov $824206446,%edx
1037 # in10 = 2036477238
1038 mov $2036477238,%ebx
1039 # in15 = 1797285236
1040 mov $1797285236,%esi
1041 # *(uint32 *) (x + 0) = in0
1042 movl %ecx,0(%eax)
1043 # *(uint32 *) (x + 20) = in5
1044 movl %edx,20(%eax)
1045 # *(uint32 *) (x + 40) = in10
1046 movl %ebx,40(%eax)
1047 # *(uint32 *) (x + 60) = in15
1048 movl %esi,60(%eax)
1049._keysetupdone:
1050 # eax = eax_stack
1051 movl 64(%esp),%eax
1052 # ebx = ebx_stack
1053 movl 68(%esp),%ebx
1054 # esi = esi_stack
1055 movl 72(%esp),%esi
1056 # edi = edi_stack
1057 movl 76(%esp),%edi
1058 # ebp = ebp_stack
1059 movl 80(%esp),%ebp
1060 # leave
1061 add %eax,%esp
1062 ret
1063# enter ECRYPT_ivsetup
1064.text
1065.p2align 5
1066.globl ECRYPT_ivsetup
1067ECRYPT_ivsetup:
1068 mov %esp,%eax
1069 and $31,%eax
1070 add $256,%eax
1071 sub %eax,%esp
1072 # eax_stack = eax
1073 movl %eax,64(%esp)
1074 # ebx_stack = ebx
1075 movl %ebx,68(%esp)
1076 # esi_stack = esi
1077 movl %esi,72(%esp)
1078 # edi_stack = edi
1079 movl %edi,76(%esp)
1080 # ebp_stack = ebp
1081 movl %ebp,80(%esp)
1082 # iv = arg2
1083 movl 8(%esp,%eax),%ecx
1084 # x = arg1
1085 movl 4(%esp,%eax),%eax
1086 # in6 = *(uint32 *) (iv + 0)
1087 movl 0(%ecx),%edx
1088 # in7 = *(uint32 *) (iv + 4)
1089 movl 4(%ecx),%ecx
1090 # in8 = 0
1091 mov $0,%ebx
1092 # in9 = 0
1093 mov $0,%esi
1094 # *(uint32 *) (x + 24) = in6
1095 movl %edx,24(%eax)
1096 # *(uint32 *) (x + 28) = in7
1097 movl %ecx,28(%eax)
1098 # *(uint32 *) (x + 32) = in8
1099 movl %ebx,32(%eax)
1100 # *(uint32 *) (x + 36) = in9
1101 movl %esi,36(%eax)
1102 # eax = eax_stack
1103 movl 64(%esp),%eax
1104 # ebx = ebx_stack
1105 movl 68(%esp),%ebx
1106 # esi = esi_stack
1107 movl 72(%esp),%esi
1108 # edi = edi_stack
1109 movl 76(%esp),%edi
1110 # ebp = ebp_stack
1111 movl 80(%esp),%ebp
1112 # leave
1113 add %eax,%esp
1114 ret
diff --git a/arch/x86/crypto/salsa20-x86_64-asm_64.S b/arch/x86/crypto/salsa20-x86_64-asm_64.S
new file mode 100644
index 000000000000..6214a9b09706
--- /dev/null
+++ b/arch/x86/crypto/salsa20-x86_64-asm_64.S
@@ -0,0 +1,920 @@
1# enter ECRYPT_encrypt_bytes
2.text
3.p2align 5
4.globl ECRYPT_encrypt_bytes
5ECRYPT_encrypt_bytes:
6 mov %rsp,%r11
7 and $31,%r11
8 add $256,%r11
9 sub %r11,%rsp
10 # x = arg1
11 mov %rdi,%r8
12 # m = arg2
13 mov %rsi,%rsi
14 # out = arg3
15 mov %rdx,%rdi
16 # bytes = arg4
17 mov %rcx,%rdx
18 # unsigned>? bytes - 0
19 cmp $0,%rdx
20 # comment:fp stack unchanged by jump
21 # goto done if !unsigned>
22 jbe ._done
23 # comment:fp stack unchanged by fallthrough
24# start:
25._start:
26 # r11_stack = r11
27 movq %r11,0(%rsp)
28 # r12_stack = r12
29 movq %r12,8(%rsp)
30 # r13_stack = r13
31 movq %r13,16(%rsp)
32 # r14_stack = r14
33 movq %r14,24(%rsp)
34 # r15_stack = r15
35 movq %r15,32(%rsp)
36 # rbx_stack = rbx
37 movq %rbx,40(%rsp)
38 # rbp_stack = rbp
39 movq %rbp,48(%rsp)
40 # in0 = *(uint64 *) (x + 0)
41 movq 0(%r8),%rcx
42 # in2 = *(uint64 *) (x + 8)
43 movq 8(%r8),%r9
44 # in4 = *(uint64 *) (x + 16)
45 movq 16(%r8),%rax
46 # in6 = *(uint64 *) (x + 24)
47 movq 24(%r8),%r10
48 # in8 = *(uint64 *) (x + 32)
49 movq 32(%r8),%r11
50 # in10 = *(uint64 *) (x + 40)
51 movq 40(%r8),%r12
52 # in12 = *(uint64 *) (x + 48)
53 movq 48(%r8),%r13
54 # in14 = *(uint64 *) (x + 56)
55 movq 56(%r8),%r14
56 # j0 = in0
57 movq %rcx,56(%rsp)
58 # j2 = in2
59 movq %r9,64(%rsp)
60 # j4 = in4
61 movq %rax,72(%rsp)
62 # j6 = in6
63 movq %r10,80(%rsp)
64 # j8 = in8
65 movq %r11,88(%rsp)
66 # j10 = in10
67 movq %r12,96(%rsp)
68 # j12 = in12
69 movq %r13,104(%rsp)
70 # j14 = in14
71 movq %r14,112(%rsp)
72 # x_backup = x
73 movq %r8,120(%rsp)
74# bytesatleast1:
75._bytesatleast1:
76 # unsigned<? bytes - 64
77 cmp $64,%rdx
78 # comment:fp stack unchanged by jump
79 # goto nocopy if !unsigned<
80 jae ._nocopy
81 # ctarget = out
82 movq %rdi,128(%rsp)
83 # out = &tmp
84 leaq 192(%rsp),%rdi
85 # i = bytes
86 mov %rdx,%rcx
87 # while (i) { *out++ = *m++; --i }
88 rep movsb
89 # out = &tmp
90 leaq 192(%rsp),%rdi
91 # m = &tmp
92 leaq 192(%rsp),%rsi
93 # comment:fp stack unchanged by fallthrough
94# nocopy:
95._nocopy:
96 # out_backup = out
97 movq %rdi,136(%rsp)
98 # m_backup = m
99 movq %rsi,144(%rsp)
100 # bytes_backup = bytes
101 movq %rdx,152(%rsp)
102 # x1 = j0
103 movq 56(%rsp),%rdi
104 # x0 = x1
105 mov %rdi,%rdx
106 # (uint64) x1 >>= 32
107 shr $32,%rdi
108 # x3 = j2
109 movq 64(%rsp),%rsi
110 # x2 = x3
111 mov %rsi,%rcx
112 # (uint64) x3 >>= 32
113 shr $32,%rsi
114 # x5 = j4
115 movq 72(%rsp),%r8
116 # x4 = x5
117 mov %r8,%r9
118 # (uint64) x5 >>= 32
119 shr $32,%r8
120 # x5_stack = x5
121 movq %r8,160(%rsp)
122 # x7 = j6
123 movq 80(%rsp),%r8
124 # x6 = x7
125 mov %r8,%rax
126 # (uint64) x7 >>= 32
127 shr $32,%r8
128 # x9 = j8
129 movq 88(%rsp),%r10
130 # x8 = x9
131 mov %r10,%r11
132 # (uint64) x9 >>= 32
133 shr $32,%r10
134 # x11 = j10
135 movq 96(%rsp),%r12
136 # x10 = x11
137 mov %r12,%r13
138 # x10_stack = x10
139 movq %r13,168(%rsp)
140 # (uint64) x11 >>= 32
141 shr $32,%r12
142 # x13 = j12
143 movq 104(%rsp),%r13
144 # x12 = x13
145 mov %r13,%r14
146 # (uint64) x13 >>= 32
147 shr $32,%r13
148 # x15 = j14
149 movq 112(%rsp),%r15
150 # x14 = x15
151 mov %r15,%rbx
152 # (uint64) x15 >>= 32
153 shr $32,%r15
154 # x15_stack = x15
155 movq %r15,176(%rsp)
156 # i = 20
157 mov $20,%r15
158# mainloop:
159._mainloop:
160 # i_backup = i
161 movq %r15,184(%rsp)
162 # x5 = x5_stack
163 movq 160(%rsp),%r15
164 # a = x12 + x0
165 lea (%r14,%rdx),%rbp
166 # (uint32) a <<<= 7
167 rol $7,%ebp
168 # x4 ^= a
169 xor %rbp,%r9
170 # b = x1 + x5
171 lea (%rdi,%r15),%rbp
172 # (uint32) b <<<= 7
173 rol $7,%ebp
174 # x9 ^= b
175 xor %rbp,%r10
176 # a = x0 + x4
177 lea (%rdx,%r9),%rbp
178 # (uint32) a <<<= 9
179 rol $9,%ebp
180 # x8 ^= a
181 xor %rbp,%r11
182 # b = x5 + x9
183 lea (%r15,%r10),%rbp
184 # (uint32) b <<<= 9
185 rol $9,%ebp
186 # x13 ^= b
187 xor %rbp,%r13
188 # a = x4 + x8
189 lea (%r9,%r11),%rbp
190 # (uint32) a <<<= 13
191 rol $13,%ebp
192 # x12 ^= a
193 xor %rbp,%r14
194 # b = x9 + x13
195 lea (%r10,%r13),%rbp
196 # (uint32) b <<<= 13
197 rol $13,%ebp
198 # x1 ^= b
199 xor %rbp,%rdi
200 # a = x8 + x12
201 lea (%r11,%r14),%rbp
202 # (uint32) a <<<= 18
203 rol $18,%ebp
204 # x0 ^= a
205 xor %rbp,%rdx
206 # b = x13 + x1
207 lea (%r13,%rdi),%rbp
208 # (uint32) b <<<= 18
209 rol $18,%ebp
210 # x5 ^= b
211 xor %rbp,%r15
212 # x10 = x10_stack
213 movq 168(%rsp),%rbp
214 # x5_stack = x5
215 movq %r15,160(%rsp)
216 # c = x6 + x10
217 lea (%rax,%rbp),%r15
218 # (uint32) c <<<= 7
219 rol $7,%r15d
220 # x14 ^= c
221 xor %r15,%rbx
222 # c = x10 + x14
223 lea (%rbp,%rbx),%r15
224 # (uint32) c <<<= 9
225 rol $9,%r15d
226 # x2 ^= c
227 xor %r15,%rcx
228 # c = x14 + x2
229 lea (%rbx,%rcx),%r15
230 # (uint32) c <<<= 13
231 rol $13,%r15d
232 # x6 ^= c
233 xor %r15,%rax
234 # c = x2 + x6
235 lea (%rcx,%rax),%r15
236 # (uint32) c <<<= 18
237 rol $18,%r15d
238 # x10 ^= c
239 xor %r15,%rbp
240 # x15 = x15_stack
241 movq 176(%rsp),%r15
242 # x10_stack = x10
243 movq %rbp,168(%rsp)
244 # d = x11 + x15
245 lea (%r12,%r15),%rbp
246 # (uint32) d <<<= 7
247 rol $7,%ebp
248 # x3 ^= d
249 xor %rbp,%rsi
250 # d = x15 + x3
251 lea (%r15,%rsi),%rbp
252 # (uint32) d <<<= 9
253 rol $9,%ebp
254 # x7 ^= d
255 xor %rbp,%r8
256 # d = x3 + x7
257 lea (%rsi,%r8),%rbp
258 # (uint32) d <<<= 13
259 rol $13,%ebp
260 # x11 ^= d
261 xor %rbp,%r12
262 # d = x7 + x11
263 lea (%r8,%r12),%rbp
264 # (uint32) d <<<= 18
265 rol $18,%ebp
266 # x15 ^= d
267 xor %rbp,%r15
268 # x15_stack = x15
269 movq %r15,176(%rsp)
270 # x5 = x5_stack
271 movq 160(%rsp),%r15
272 # a = x3 + x0
273 lea (%rsi,%rdx),%rbp
274 # (uint32) a <<<= 7
275 rol $7,%ebp
276 # x1 ^= a
277 xor %rbp,%rdi
278 # b = x4 + x5
279 lea (%r9,%r15),%rbp
280 # (uint32) b <<<= 7
281 rol $7,%ebp
282 # x6 ^= b
283 xor %rbp,%rax
284 # a = x0 + x1
285 lea (%rdx,%rdi),%rbp
286 # (uint32) a <<<= 9
287 rol $9,%ebp
288 # x2 ^= a
289 xor %rbp,%rcx
290 # b = x5 + x6
291 lea (%r15,%rax),%rbp
292 # (uint32) b <<<= 9
293 rol $9,%ebp
294 # x7 ^= b
295 xor %rbp,%r8
296 # a = x1 + x2
297 lea (%rdi,%rcx),%rbp
298 # (uint32) a <<<= 13
299 rol $13,%ebp
300 # x3 ^= a
301 xor %rbp,%rsi
302 # b = x6 + x7
303 lea (%rax,%r8),%rbp
304 # (uint32) b <<<= 13
305 rol $13,%ebp
306 # x4 ^= b
307 xor %rbp,%r9
308 # a = x2 + x3
309 lea (%rcx,%rsi),%rbp
310 # (uint32) a <<<= 18
311 rol $18,%ebp
312 # x0 ^= a
313 xor %rbp,%rdx
314 # b = x7 + x4
315 lea (%r8,%r9),%rbp
316 # (uint32) b <<<= 18
317 rol $18,%ebp
318 # x5 ^= b
319 xor %rbp,%r15
320 # x10 = x10_stack
321 movq 168(%rsp),%rbp
322 # x5_stack = x5
323 movq %r15,160(%rsp)
324 # c = x9 + x10
325 lea (%r10,%rbp),%r15
326 # (uint32) c <<<= 7
327 rol $7,%r15d
328 # x11 ^= c
329 xor %r15,%r12
330 # c = x10 + x11
331 lea (%rbp,%r12),%r15
332 # (uint32) c <<<= 9
333 rol $9,%r15d
334 # x8 ^= c
335 xor %r15,%r11
336 # c = x11 + x8
337 lea (%r12,%r11),%r15
338 # (uint32) c <<<= 13
339 rol $13,%r15d
340 # x9 ^= c
341 xor %r15,%r10
342 # c = x8 + x9
343 lea (%r11,%r10),%r15
344 # (uint32) c <<<= 18
345 rol $18,%r15d
346 # x10 ^= c
347 xor %r15,%rbp
348 # x15 = x15_stack
349 movq 176(%rsp),%r15
350 # x10_stack = x10
351 movq %rbp,168(%rsp)
352 # d = x14 + x15
353 lea (%rbx,%r15),%rbp
354 # (uint32) d <<<= 7
355 rol $7,%ebp
356 # x12 ^= d
357 xor %rbp,%r14
358 # d = x15 + x12
359 lea (%r15,%r14),%rbp
360 # (uint32) d <<<= 9
361 rol $9,%ebp
362 # x13 ^= d
363 xor %rbp,%r13
364 # d = x12 + x13
365 lea (%r14,%r13),%rbp
366 # (uint32) d <<<= 13
367 rol $13,%ebp
368 # x14 ^= d
369 xor %rbp,%rbx
370 # d = x13 + x14
371 lea (%r13,%rbx),%rbp
372 # (uint32) d <<<= 18
373 rol $18,%ebp
374 # x15 ^= d
375 xor %rbp,%r15
376 # x15_stack = x15
377 movq %r15,176(%rsp)
378 # x5 = x5_stack
379 movq 160(%rsp),%r15
380 # a = x12 + x0
381 lea (%r14,%rdx),%rbp
382 # (uint32) a <<<= 7
383 rol $7,%ebp
384 # x4 ^= a
385 xor %rbp,%r9
386 # b = x1 + x5
387 lea (%rdi,%r15),%rbp
388 # (uint32) b <<<= 7
389 rol $7,%ebp
390 # x9 ^= b
391 xor %rbp,%r10
392 # a = x0 + x4
393 lea (%rdx,%r9),%rbp
394 # (uint32) a <<<= 9
395 rol $9,%ebp
396 # x8 ^= a
397 xor %rbp,%r11
398 # b = x5 + x9
399 lea (%r15,%r10),%rbp
400 # (uint32) b <<<= 9
401 rol $9,%ebp
402 # x13 ^= b
403 xor %rbp,%r13
404 # a = x4 + x8
405 lea (%r9,%r11),%rbp
406 # (uint32) a <<<= 13
407 rol $13,%ebp
408 # x12 ^= a
409 xor %rbp,%r14
410 # b = x9 + x13
411 lea (%r10,%r13),%rbp
412 # (uint32) b <<<= 13
413 rol $13,%ebp
414 # x1 ^= b
415 xor %rbp,%rdi
416 # a = x8 + x12
417 lea (%r11,%r14),%rbp
418 # (uint32) a <<<= 18
419 rol $18,%ebp
420 # x0 ^= a
421 xor %rbp,%rdx
422 # b = x13 + x1
423 lea (%r13,%rdi),%rbp
424 # (uint32) b <<<= 18
425 rol $18,%ebp
426 # x5 ^= b
427 xor %rbp,%r15
428 # x10 = x10_stack
429 movq 168(%rsp),%rbp
430 # x5_stack = x5
431 movq %r15,160(%rsp)
432 # c = x6 + x10
433 lea (%rax,%rbp),%r15
434 # (uint32) c <<<= 7
435 rol $7,%r15d
436 # x14 ^= c
437 xor %r15,%rbx
438 # c = x10 + x14
439 lea (%rbp,%rbx),%r15
440 # (uint32) c <<<= 9
441 rol $9,%r15d
442 # x2 ^= c
443 xor %r15,%rcx
444 # c = x14 + x2
445 lea (%rbx,%rcx),%r15
446 # (uint32) c <<<= 13
447 rol $13,%r15d
448 # x6 ^= c
449 xor %r15,%rax
450 # c = x2 + x6
451 lea (%rcx,%rax),%r15
452 # (uint32) c <<<= 18
453 rol $18,%r15d
454 # x10 ^= c
455 xor %r15,%rbp
456 # x15 = x15_stack
457 movq 176(%rsp),%r15
458 # x10_stack = x10
459 movq %rbp,168(%rsp)
460 # d = x11 + x15
461 lea (%r12,%r15),%rbp
462 # (uint32) d <<<= 7
463 rol $7,%ebp
464 # x3 ^= d
465 xor %rbp,%rsi
466 # d = x15 + x3
467 lea (%r15,%rsi),%rbp
468 # (uint32) d <<<= 9
469 rol $9,%ebp
470 # x7 ^= d
471 xor %rbp,%r8
472 # d = x3 + x7
473 lea (%rsi,%r8),%rbp
474 # (uint32) d <<<= 13
475 rol $13,%ebp
476 # x11 ^= d
477 xor %rbp,%r12
478 # d = x7 + x11
479 lea (%r8,%r12),%rbp
480 # (uint32) d <<<= 18
481 rol $18,%ebp
482 # x15 ^= d
483 xor %rbp,%r15
484 # x15_stack = x15
485 movq %r15,176(%rsp)
486 # x5 = x5_stack
487 movq 160(%rsp),%r15
488 # a = x3 + x0
489 lea (%rsi,%rdx),%rbp
490 # (uint32) a <<<= 7
491 rol $7,%ebp
492 # x1 ^= a
493 xor %rbp,%rdi
494 # b = x4 + x5
495 lea (%r9,%r15),%rbp
496 # (uint32) b <<<= 7
497 rol $7,%ebp
498 # x6 ^= b
499 xor %rbp,%rax
500 # a = x0 + x1
501 lea (%rdx,%rdi),%rbp
502 # (uint32) a <<<= 9
503 rol $9,%ebp
504 # x2 ^= a
505 xor %rbp,%rcx
506 # b = x5 + x6
507 lea (%r15,%rax),%rbp
508 # (uint32) b <<<= 9
509 rol $9,%ebp
510 # x7 ^= b
511 xor %rbp,%r8
512 # a = x1 + x2
513 lea (%rdi,%rcx),%rbp
514 # (uint32) a <<<= 13
515 rol $13,%ebp
516 # x3 ^= a
517 xor %rbp,%rsi
518 # b = x6 + x7
519 lea (%rax,%r8),%rbp
520 # (uint32) b <<<= 13
521 rol $13,%ebp
522 # x4 ^= b
523 xor %rbp,%r9
524 # a = x2 + x3
525 lea (%rcx,%rsi),%rbp
526 # (uint32) a <<<= 18
527 rol $18,%ebp
528 # x0 ^= a
529 xor %rbp,%rdx
530 # b = x7 + x4
531 lea (%r8,%r9),%rbp
532 # (uint32) b <<<= 18
533 rol $18,%ebp
534 # x5 ^= b
535 xor %rbp,%r15
536 # x10 = x10_stack
537 movq 168(%rsp),%rbp
538 # x5_stack = x5
539 movq %r15,160(%rsp)
540 # c = x9 + x10
541 lea (%r10,%rbp),%r15
542 # (uint32) c <<<= 7
543 rol $7,%r15d
544 # x11 ^= c
545 xor %r15,%r12
546 # c = x10 + x11
547 lea (%rbp,%r12),%r15
548 # (uint32) c <<<= 9
549 rol $9,%r15d
550 # x8 ^= c
551 xor %r15,%r11
552 # c = x11 + x8
553 lea (%r12,%r11),%r15
554 # (uint32) c <<<= 13
555 rol $13,%r15d
556 # x9 ^= c
557 xor %r15,%r10
558 # c = x8 + x9
559 lea (%r11,%r10),%r15
560 # (uint32) c <<<= 18
561 rol $18,%r15d
562 # x10 ^= c
563 xor %r15,%rbp
564 # x15 = x15_stack
565 movq 176(%rsp),%r15
566 # x10_stack = x10
567 movq %rbp,168(%rsp)
568 # d = x14 + x15
569 lea (%rbx,%r15),%rbp
570 # (uint32) d <<<= 7
571 rol $7,%ebp
572 # x12 ^= d
573 xor %rbp,%r14
574 # d = x15 + x12
575 lea (%r15,%r14),%rbp
576 # (uint32) d <<<= 9
577 rol $9,%ebp
578 # x13 ^= d
579 xor %rbp,%r13
580 # d = x12 + x13
581 lea (%r14,%r13),%rbp
582 # (uint32) d <<<= 13
583 rol $13,%ebp
584 # x14 ^= d
585 xor %rbp,%rbx
586 # d = x13 + x14
587 lea (%r13,%rbx),%rbp
588 # (uint32) d <<<= 18
589 rol $18,%ebp
590 # x15 ^= d
591 xor %rbp,%r15
592 # x15_stack = x15
593 movq %r15,176(%rsp)
594 # i = i_backup
595 movq 184(%rsp),%r15
596 # unsigned>? i -= 4
597 sub $4,%r15
598 # comment:fp stack unchanged by jump
599 # goto mainloop if unsigned>
600 ja ._mainloop
601 # (uint32) x2 += j2
602 addl 64(%rsp),%ecx
603 # x3 <<= 32
604 shl $32,%rsi
605 # x3 += j2
606 addq 64(%rsp),%rsi
607 # (uint64) x3 >>= 32
608 shr $32,%rsi
609 # x3 <<= 32
610 shl $32,%rsi
611 # x2 += x3
612 add %rsi,%rcx
613 # (uint32) x6 += j6
614 addl 80(%rsp),%eax
615 # x7 <<= 32
616 shl $32,%r8
617 # x7 += j6
618 addq 80(%rsp),%r8
619 # (uint64) x7 >>= 32
620 shr $32,%r8
621 # x7 <<= 32
622 shl $32,%r8
623 # x6 += x7
624 add %r8,%rax
625 # (uint32) x8 += j8
626 addl 88(%rsp),%r11d
627 # x9 <<= 32
628 shl $32,%r10
629 # x9 += j8
630 addq 88(%rsp),%r10
631 # (uint64) x9 >>= 32
632 shr $32,%r10
633 # x9 <<= 32
634 shl $32,%r10
635 # x8 += x9
636 add %r10,%r11
637 # (uint32) x12 += j12
638 addl 104(%rsp),%r14d
639 # x13 <<= 32
640 shl $32,%r13
641 # x13 += j12
642 addq 104(%rsp),%r13
643 # (uint64) x13 >>= 32
644 shr $32,%r13
645 # x13 <<= 32
646 shl $32,%r13
647 # x12 += x13
648 add %r13,%r14
649 # (uint32) x0 += j0
650 addl 56(%rsp),%edx
651 # x1 <<= 32
652 shl $32,%rdi
653 # x1 += j0
654 addq 56(%rsp),%rdi
655 # (uint64) x1 >>= 32
656 shr $32,%rdi
657 # x1 <<= 32
658 shl $32,%rdi
659 # x0 += x1
660 add %rdi,%rdx
661 # x5 = x5_stack
662 movq 160(%rsp),%rdi
663 # (uint32) x4 += j4
664 addl 72(%rsp),%r9d
665 # x5 <<= 32
666 shl $32,%rdi
667 # x5 += j4
668 addq 72(%rsp),%rdi
669 # (uint64) x5 >>= 32
670 shr $32,%rdi
671 # x5 <<= 32
672 shl $32,%rdi
673 # x4 += x5
674 add %rdi,%r9
675 # x10 = x10_stack
676 movq 168(%rsp),%r8
677 # (uint32) x10 += j10
678 addl 96(%rsp),%r8d
679 # x11 <<= 32
680 shl $32,%r12
681 # x11 += j10
682 addq 96(%rsp),%r12
683 # (uint64) x11 >>= 32
684 shr $32,%r12
685 # x11 <<= 32
686 shl $32,%r12
687 # x10 += x11
688 add %r12,%r8
689 # x15 = x15_stack
690 movq 176(%rsp),%rdi
691 # (uint32) x14 += j14
692 addl 112(%rsp),%ebx
693 # x15 <<= 32
694 shl $32,%rdi
695 # x15 += j14
696 addq 112(%rsp),%rdi
697 # (uint64) x15 >>= 32
698 shr $32,%rdi
699 # x15 <<= 32
700 shl $32,%rdi
701 # x14 += x15
702 add %rdi,%rbx
703 # out = out_backup
704 movq 136(%rsp),%rdi
705 # m = m_backup
706 movq 144(%rsp),%rsi
707 # x0 ^= *(uint64 *) (m + 0)
708 xorq 0(%rsi),%rdx
709 # *(uint64 *) (out + 0) = x0
710 movq %rdx,0(%rdi)
711 # x2 ^= *(uint64 *) (m + 8)
712 xorq 8(%rsi),%rcx
713 # *(uint64 *) (out + 8) = x2
714 movq %rcx,8(%rdi)
715 # x4 ^= *(uint64 *) (m + 16)
716 xorq 16(%rsi),%r9
717 # *(uint64 *) (out + 16) = x4
718 movq %r9,16(%rdi)
719 # x6 ^= *(uint64 *) (m + 24)
720 xorq 24(%rsi),%rax
721 # *(uint64 *) (out + 24) = x6
722 movq %rax,24(%rdi)
723 # x8 ^= *(uint64 *) (m + 32)
724 xorq 32(%rsi),%r11
725 # *(uint64 *) (out + 32) = x8
726 movq %r11,32(%rdi)
727 # x10 ^= *(uint64 *) (m + 40)
728 xorq 40(%rsi),%r8
729 # *(uint64 *) (out + 40) = x10
730 movq %r8,40(%rdi)
731 # x12 ^= *(uint64 *) (m + 48)
732 xorq 48(%rsi),%r14
733 # *(uint64 *) (out + 48) = x12
734 movq %r14,48(%rdi)
735 # x14 ^= *(uint64 *) (m + 56)
736 xorq 56(%rsi),%rbx
737 # *(uint64 *) (out + 56) = x14
738 movq %rbx,56(%rdi)
739 # bytes = bytes_backup
740 movq 152(%rsp),%rdx
741 # in8 = j8
742 movq 88(%rsp),%rcx
743 # in8 += 1
744 add $1,%rcx
745 # j8 = in8
746 movq %rcx,88(%rsp)
747 # unsigned>? unsigned<? bytes - 64
748 cmp $64,%rdx
749 # comment:fp stack unchanged by jump
750 # goto bytesatleast65 if unsigned>
751 ja ._bytesatleast65
752 # comment:fp stack unchanged by jump
753 # goto bytesatleast64 if !unsigned<
754 jae ._bytesatleast64
755 # m = out
756 mov %rdi,%rsi
757 # out = ctarget
758 movq 128(%rsp),%rdi
759 # i = bytes
760 mov %rdx,%rcx
761 # while (i) { *out++ = *m++; --i }
762 rep movsb
763 # comment:fp stack unchanged by fallthrough
764# bytesatleast64:
765._bytesatleast64:
766 # x = x_backup
767 movq 120(%rsp),%rdi
768 # in8 = j8
769 movq 88(%rsp),%rsi
770 # *(uint64 *) (x + 32) = in8
771 movq %rsi,32(%rdi)
772 # r11 = r11_stack
773 movq 0(%rsp),%r11
774 # r12 = r12_stack
775 movq 8(%rsp),%r12
776 # r13 = r13_stack
777 movq 16(%rsp),%r13
778 # r14 = r14_stack
779 movq 24(%rsp),%r14
780 # r15 = r15_stack
781 movq 32(%rsp),%r15
782 # rbx = rbx_stack
783 movq 40(%rsp),%rbx
784 # rbp = rbp_stack
785 movq 48(%rsp),%rbp
786 # comment:fp stack unchanged by fallthrough
787# done:
788._done:
789 # leave
790 add %r11,%rsp
791 mov %rdi,%rax
792 mov %rsi,%rdx
793 ret
794# bytesatleast65:
795._bytesatleast65:
796 # bytes -= 64
797 sub $64,%rdx
798 # out += 64
799 add $64,%rdi
800 # m += 64
801 add $64,%rsi
802 # comment:fp stack unchanged by jump
803 # goto bytesatleast1
804 jmp ._bytesatleast1
805# enter ECRYPT_keysetup
806.text
807.p2align 5
808.globl ECRYPT_keysetup
809ECRYPT_keysetup:
810 mov %rsp,%r11
811 and $31,%r11
812 add $256,%r11
813 sub %r11,%rsp
814 # k = arg2
815 mov %rsi,%rsi
816 # kbits = arg3
817 mov %rdx,%rdx
818 # x = arg1
819 mov %rdi,%rdi
820 # in0 = *(uint64 *) (k + 0)
821 movq 0(%rsi),%r8
822 # in2 = *(uint64 *) (k + 8)
823 movq 8(%rsi),%r9
824 # *(uint64 *) (x + 4) = in0
825 movq %r8,4(%rdi)
826 # *(uint64 *) (x + 12) = in2
827 movq %r9,12(%rdi)
828 # unsigned<? kbits - 256
829 cmp $256,%rdx
830 # comment:fp stack unchanged by jump
831 # goto kbits128 if unsigned<
832 jb ._kbits128
833# kbits256:
834._kbits256:
835 # in10 = *(uint64 *) (k + 16)
836 movq 16(%rsi),%rdx
837 # in12 = *(uint64 *) (k + 24)
838 movq 24(%rsi),%rsi
839 # *(uint64 *) (x + 44) = in10
840 movq %rdx,44(%rdi)
841 # *(uint64 *) (x + 52) = in12
842 movq %rsi,52(%rdi)
843 # in0 = 1634760805
844 mov $1634760805,%rsi
845 # in4 = 857760878
846 mov $857760878,%rdx
847 # in10 = 2036477234
848 mov $2036477234,%rcx
849 # in14 = 1797285236
850 mov $1797285236,%r8
851 # *(uint32 *) (x + 0) = in0
852 movl %esi,0(%rdi)
853 # *(uint32 *) (x + 20) = in4
854 movl %edx,20(%rdi)
855 # *(uint32 *) (x + 40) = in10
856 movl %ecx,40(%rdi)
857 # *(uint32 *) (x + 60) = in14
858 movl %r8d,60(%rdi)
859 # comment:fp stack unchanged by jump
860 # goto keysetupdone
861 jmp ._keysetupdone
862# kbits128:
863._kbits128:
864 # in10 = *(uint64 *) (k + 0)
865 movq 0(%rsi),%rdx
866 # in12 = *(uint64 *) (k + 8)
867 movq 8(%rsi),%rsi
868 # *(uint64 *) (x + 44) = in10
869 movq %rdx,44(%rdi)
870 # *(uint64 *) (x + 52) = in12
871 movq %rsi,52(%rdi)
872 # in0 = 1634760805
873 mov $1634760805,%rsi
874 # in4 = 824206446
875 mov $824206446,%rdx
876 # in10 = 2036477238
877 mov $2036477238,%rcx
878 # in14 = 1797285236
879 mov $1797285236,%r8
880 # *(uint32 *) (x + 0) = in0
881 movl %esi,0(%rdi)
882 # *(uint32 *) (x + 20) = in4
883 movl %edx,20(%rdi)
884 # *(uint32 *) (x + 40) = in10
885 movl %ecx,40(%rdi)
886 # *(uint32 *) (x + 60) = in14
887 movl %r8d,60(%rdi)
888# keysetupdone:
889._keysetupdone:
890 # leave
891 add %r11,%rsp
892 mov %rdi,%rax
893 mov %rsi,%rdx
894 ret
895# enter ECRYPT_ivsetup
896.text
897.p2align 5
898.globl ECRYPT_ivsetup
899ECRYPT_ivsetup:
900 mov %rsp,%r11
901 and $31,%r11
902 add $256,%r11
903 sub %r11,%rsp
904 # iv = arg2
905 mov %rsi,%rsi
906 # x = arg1
907 mov %rdi,%rdi
908 # in6 = *(uint64 *) (iv + 0)
909 movq 0(%rsi),%rsi
910 # in8 = 0
911 mov $0,%r8
912 # *(uint64 *) (x + 24) = in6
913 movq %rsi,24(%rdi)
914 # *(uint64 *) (x + 32) = in8
915 movq %r8,32(%rdi)
916 # leave
917 add %r11,%rsp
918 mov %rdi,%rax
919 mov %rsi,%rdx
920 ret
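
The per-instruction comments in the two assembly files above ("a = x12 + x0", "a <<<= 7", "x4 ^= a", and so on) spell out the Salsa20 round structure that each file unrolls. For reference only, and not part of the patch, one column-round-plus-row-round double round of Salsa20 in plain C looks roughly like:

/* Reference sketch of one Salsa20 double round (column round followed by
 * row round), matching the "a = x12 + x0; a <<<= 7; x4 ^= a; ..." comments
 * in the assembly above. Not part of the patch. */
#include <linux/types.h>

#define R(v, n) (((v) << (n)) | ((v) >> (32 - (n))))

static void salsa20_doubleround(u32 x[16])
{
	/* column round */
	x[ 4] ^= R(x[ 0] + x[12],  7); x[ 8] ^= R(x[ 4] + x[ 0],  9);
	x[12] ^= R(x[ 8] + x[ 4], 13); x[ 0] ^= R(x[12] + x[ 8], 18);
	x[ 9] ^= R(x[ 5] + x[ 1],  7); x[13] ^= R(x[ 9] + x[ 5],  9);
	x[ 1] ^= R(x[13] + x[ 9], 13); x[ 5] ^= R(x[ 1] + x[13], 18);
	x[14] ^= R(x[10] + x[ 6],  7); x[ 2] ^= R(x[14] + x[10],  9);
	x[ 6] ^= R(x[ 2] + x[14], 13); x[10] ^= R(x[ 6] + x[ 2], 18);
	x[ 3] ^= R(x[15] + x[11],  7); x[ 7] ^= R(x[ 3] + x[15],  9);
	x[11] ^= R(x[ 7] + x[ 3], 13); x[15] ^= R(x[11] + x[ 7], 18);
	/* row round */
	x[ 1] ^= R(x[ 0] + x[ 3],  7); x[ 2] ^= R(x[ 1] + x[ 0],  9);
	x[ 3] ^= R(x[ 2] + x[ 1], 13); x[ 0] ^= R(x[ 3] + x[ 2], 18);
	x[ 6] ^= R(x[ 5] + x[ 4],  7); x[ 7] ^= R(x[ 6] + x[ 5],  9);
	x[ 4] ^= R(x[ 7] + x[ 6], 13); x[ 5] ^= R(x[ 4] + x[ 7], 18);
	x[11] ^= R(x[10] + x[ 9],  7); x[ 8] ^= R(x[11] + x[10],  9);
	x[ 9] ^= R(x[ 8] + x[11], 13); x[10] ^= R(x[ 9] + x[ 8], 18);
	x[12] ^= R(x[15] + x[14],  7); x[13] ^= R(x[12] + x[15],  9);
	x[14] ^= R(x[13] + x[12], 13); x[15] ^= R(x[14] + x[13], 18);
}

The round counters in the assembly ("i = 20", "i -= 4") count single rounds: each pass through the main loop performs two of these double rounds, so five passes give the full 20 rounds of Salsa20/20.
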
diff --git a/arch/x86/crypto/salsa20_glue.c b/arch/x86/crypto/salsa20_glue.c
new file mode 100644
index 000000000000..bccb76d80987
--- /dev/null
+++ b/arch/x86/crypto/salsa20_glue.c
@@ -0,0 +1,129 @@
1/*
2 * Glue code for optimized assembly version of Salsa20.
3 *
4 * Copyright (c) 2007 Tan Swee Heng <thesweeheng@gmail.com>
5 *
6 * The assembly code is public domain code written by Daniel J.
7 * Bernstein <djb@cr.yp.to>. It has been modified to add indentation

8 * and to remove extraneous comments and functions that are not needed.
9 * - i586 version, renamed as salsa20-i586-asm_32.S
10 * available from <http://cr.yp.to/snuffle/salsa20/x86-pm/salsa20.s>
11 * - x86-64 version, renamed as salsa20-x86_64-asm_64.S
12 * available from <http://cr.yp.to/snuffle/salsa20/amd64-3/salsa20.s>
13 *
14 * This program is free software; you can redistribute it and/or modify it
15 * under the terms of the GNU General Public License as published by the Free
16 * Software Foundation; either version 2 of the License, or (at your option)
17 * any later version.
18 *
19 */
20
21#include <crypto/algapi.h>
22#include <linux/module.h>
23#include <linux/crypto.h>
24
25#define SALSA20_IV_SIZE 8U
26#define SALSA20_MIN_KEY_SIZE 16U
27#define SALSA20_MAX_KEY_SIZE 32U
28
29// use the ECRYPT_* function names
30#define salsa20_keysetup ECRYPT_keysetup
31#define salsa20_ivsetup ECRYPT_ivsetup
32#define salsa20_encrypt_bytes ECRYPT_encrypt_bytes
33
34struct salsa20_ctx
35{
36 u32 input[16];
37};
38
39asmlinkage void salsa20_keysetup(struct salsa20_ctx *ctx, const u8 *k,
40 u32 keysize, u32 ivsize);
41asmlinkage void salsa20_ivsetup(struct salsa20_ctx *ctx, const u8 *iv);
42asmlinkage void salsa20_encrypt_bytes(struct salsa20_ctx *ctx,
43 const u8 *src, u8 *dst, u32 bytes);
44
45static int setkey(struct crypto_tfm *tfm, const u8 *key,
46 unsigned int keysize)
47{
48 struct salsa20_ctx *ctx = crypto_tfm_ctx(tfm);
49 salsa20_keysetup(ctx, key, keysize*8, SALSA20_IV_SIZE*8);
50 return 0;
51}
52
53static int encrypt(struct blkcipher_desc *desc,
54 struct scatterlist *dst, struct scatterlist *src,
55 unsigned int nbytes)
56{
57 struct blkcipher_walk walk;
58 struct crypto_blkcipher *tfm = desc->tfm;
59 struct salsa20_ctx *ctx = crypto_blkcipher_ctx(tfm);
60 int err;
61
62 blkcipher_walk_init(&walk, dst, src, nbytes);
63 err = blkcipher_walk_virt_block(desc, &walk, 64);
64
65 salsa20_ivsetup(ctx, walk.iv);
66
67 if (likely(walk.nbytes == nbytes))
68 {
69 salsa20_encrypt_bytes(ctx, walk.src.virt.addr,
70 walk.dst.virt.addr, nbytes);
71 return blkcipher_walk_done(desc, &walk, 0);
72 }
73
74 while (walk.nbytes >= 64) {
75 salsa20_encrypt_bytes(ctx, walk.src.virt.addr,
76 walk.dst.virt.addr,
77 walk.nbytes - (walk.nbytes % 64));
78 err = blkcipher_walk_done(desc, &walk, walk.nbytes % 64);
79 }
80
81 if (walk.nbytes) {
82 salsa20_encrypt_bytes(ctx, walk.src.virt.addr,
83 walk.dst.virt.addr, walk.nbytes);
84 err = blkcipher_walk_done(desc, &walk, 0);
85 }
86
87 return err;
88}
89
90static struct crypto_alg alg = {
91 .cra_name = "salsa20",
92 .cra_driver_name = "salsa20-asm",
93 .cra_priority = 200,
94 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
95 .cra_type = &crypto_blkcipher_type,
96 .cra_blocksize = 1,
97 .cra_ctxsize = sizeof(struct salsa20_ctx),
98 .cra_alignmask = 3,
99 .cra_module = THIS_MODULE,
100 .cra_list = LIST_HEAD_INIT(alg.cra_list),
101 .cra_u = {
102 .blkcipher = {
103 .setkey = setkey,
104 .encrypt = encrypt,
105 .decrypt = encrypt,
106 .min_keysize = SALSA20_MIN_KEY_SIZE,
107 .max_keysize = SALSA20_MAX_KEY_SIZE,
108 .ivsize = SALSA20_IV_SIZE,
109 }
110 }
111};
112
113static int __init init(void)
114{
115 return crypto_register_alg(&alg);
116}
117
118static void __exit fini(void)
119{
120 crypto_unregister_alg(&alg);
121}
122
123module_init(init);
124module_exit(fini);
125
126MODULE_LICENSE("GPL");
127MODULE_DESCRIPTION ("Salsa20 stream cipher algorithm (optimized assembly version)");
128MODULE_ALIAS("salsa20");
129MODULE_ALIAS("salsa20-asm");
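
The glue above registers Salsa20 as a synchronous blkcipher with a block size of 1 byte, an 8-byte IV and a 16- or 32-byte key. Purely as a hypothetical sketch of how a caller of this era might drive it (not part of the patch; the function name is invented for illustration):

/* Hypothetical caller sketch for the 2.6.24-era blkcipher API; not part of
 * this patch. Encrypts a linear buffer in place with "salsa20". */
#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <linux/err.h>

static int example_salsa20(u8 *buf, unsigned int len,
			   const u8 *key, unsigned int keylen,
			   const u8 iv[8])
{
	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	struct scatterlist sg;
	int err;

	tfm = crypto_alloc_blkcipher("salsa20", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_blkcipher_setkey(tfm, key, keylen);
	if (err)
		goto out;

	crypto_blkcipher_set_iv(tfm, iv, 8);

	desc.tfm = tfm;
	desc.flags = 0;
	sg_init_one(&sg, buf, len);

	/* Salsa20 is a stream cipher, so decryption is the same operation. */
	err = crypto_blkcipher_encrypt(&desc, &sg, &sg, len);
out:
	crypto_free_blkcipher(tfm);
	return err;
}
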
diff --git a/arch/x86/crypto/twofish_64.c b/arch/x86/crypto/twofish_64.c
deleted file mode 100644
index 182d91d5cfb9..000000000000
--- a/arch/x86/crypto/twofish_64.c
+++ /dev/null
@@ -1,97 +0,0 @@
1/*
2 * Glue Code for optimized x86_64 assembler version of TWOFISH
3 *
4 * Originally Twofish for GPG
5 * By Matthew Skala <mskala@ansuz.sooke.bc.ca>, July 26, 1998
6 * 256-bit key length added March 20, 1999
7 * Some modifications to reduce the text size by Werner Koch, April, 1998
8 * Ported to the kerneli patch by Marc Mutz <Marc@Mutz.com>
9 * Ported to CryptoAPI by Colin Slater <hoho@tacomeat.net>
10 *
11 * The original author has disclaimed all copyright interest in this
12 * code and thus put it in the public domain. The subsequent authors
13 * have put this under the GNU General Public License.
14 *
15 * This program is free software; you can redistribute it and/or modify
16 * it under the terms of the GNU General Public License as published by
17 * the Free Software Foundation; either version 2 of the License, or
18 * (at your option) any later version.
19 *
20 * This program is distributed in the hope that it will be useful,
21 * but WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23 * GNU General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with this program; if not, write to the Free Software
27 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
28 * USA
29 *
30 * This code is a "clean room" implementation, written from the paper
31 * _Twofish: A 128-Bit Block Cipher_ by Bruce Schneier, John Kelsey,
32 * Doug Whiting, David Wagner, Chris Hall, and Niels Ferguson, available
33 * through http://www.counterpane.com/twofish.html
34 *
35 * For background information on multiplication in finite fields, used for
36 * the matrix operations in the key schedule, see the book _Contemporary
37 * Abstract Algebra_ by Joseph A. Gallian, especially chapter 22 in the
38 * Third Edition.
39 */
40
41#include <crypto/twofish.h>
42#include <linux/crypto.h>
43#include <linux/init.h>
44#include <linux/kernel.h>
45#include <linux/module.h>
46#include <linux/types.h>
47
48asmlinkage void twofish_enc_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
49asmlinkage void twofish_dec_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
50
51static void twofish_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
52{
53 twofish_enc_blk(tfm, dst, src);
54}
55
56static void twofish_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
57{
58 twofish_dec_blk(tfm, dst, src);
59}
60
61static struct crypto_alg alg = {
62 .cra_name = "twofish",
63 .cra_driver_name = "twofish-x86_64",
64 .cra_priority = 200,
65 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
66 .cra_blocksize = TF_BLOCK_SIZE,
67 .cra_ctxsize = sizeof(struct twofish_ctx),
68 .cra_alignmask = 3,
69 .cra_module = THIS_MODULE,
70 .cra_list = LIST_HEAD_INIT(alg.cra_list),
71 .cra_u = {
72 .cipher = {
73 .cia_min_keysize = TF_MIN_KEY_SIZE,
74 .cia_max_keysize = TF_MAX_KEY_SIZE,
75 .cia_setkey = twofish_setkey,
76 .cia_encrypt = twofish_encrypt,
77 .cia_decrypt = twofish_decrypt
78 }
79 }
80};
81
82static int __init init(void)
83{
84 return crypto_register_alg(&alg);
85}
86
87static void __exit fini(void)
88{
89 crypto_unregister_alg(&alg);
90}
91
92module_init(init);
93module_exit(fini);
94
95MODULE_LICENSE("GPL");
96MODULE_DESCRIPTION ("Twofish Cipher Algorithm, x86_64 asm optimized");
97MODULE_ALIAS("twofish");
diff --git a/arch/x86/crypto/twofish_32.c b/arch/x86/crypto/twofish_glue.c
index e3004dfe9c7a..cefaf8b9aa18 100644
--- a/arch/x86/crypto/twofish_32.c
+++ b/arch/x86/crypto/twofish_glue.c
@@ -1,5 +1,5 @@
1/* 1/*
2 * Glue Code for optimized 586 assembler version of TWOFISH 2 * Glue Code for assembler optimized version of TWOFISH
3 * 3 *
4 * Originally Twofish for GPG 4 * Originally Twofish for GPG
5 * By Matthew Skala <mskala@ansuz.sooke.bc.ca>, July 26, 1998 5 * By Matthew Skala <mskala@ansuz.sooke.bc.ca>, July 26, 1998
@@ -44,7 +44,6 @@
44#include <linux/module.h> 44#include <linux/module.h>
45#include <linux/types.h> 45#include <linux/types.h>
46 46
47
48asmlinkage void twofish_enc_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src); 47asmlinkage void twofish_enc_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
49asmlinkage void twofish_dec_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src); 48asmlinkage void twofish_dec_blk(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
50 49
@@ -60,7 +59,7 @@ static void twofish_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
60 59
61static struct crypto_alg alg = { 60static struct crypto_alg alg = {
62 .cra_name = "twofish", 61 .cra_name = "twofish",
63 .cra_driver_name = "twofish-i586", 62 .cra_driver_name = "twofish-asm",
64 .cra_priority = 200, 63 .cra_priority = 200,
65 .cra_flags = CRYPTO_ALG_TYPE_CIPHER, 64 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
66 .cra_blocksize = TF_BLOCK_SIZE, 65 .cra_blocksize = TF_BLOCK_SIZE,
@@ -93,5 +92,6 @@ module_init(init);
93module_exit(fini); 92module_exit(fini);
94 93
95MODULE_LICENSE("GPL"); 94MODULE_LICENSE("GPL");
96MODULE_DESCRIPTION ("Twofish Cipher Algorithm, i586 asm optimized"); 95MODULE_DESCRIPTION ("Twofish Cipher Algorithm, asm optimized");
97MODULE_ALIAS("twofish"); 96MODULE_ALIAS("twofish");
97MODULE_ALIAS("twofish-asm");
diff --git a/crypto/Kconfig b/crypto/Kconfig
index 083d2e1dfc21..c3166a1a5bb6 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -24,10 +24,6 @@ config CRYPTO_ALGAPI
24 help 24 help
25 This option provides the API for cryptographic algorithms. 25 This option provides the API for cryptographic algorithms.
26 26
27config CRYPTO_ABLKCIPHER
28 tristate
29 select CRYPTO_BLKCIPHER
30
31config CRYPTO_AEAD 27config CRYPTO_AEAD
32 tristate 28 tristate
33 select CRYPTO_ALGAPI 29 select CRYPTO_ALGAPI
@@ -36,6 +32,15 @@ config CRYPTO_BLKCIPHER
36 tristate 32 tristate
37 select CRYPTO_ALGAPI 33 select CRYPTO_ALGAPI
38 34
35config CRYPTO_SEQIV
36 tristate "Sequence Number IV Generator"
37 select CRYPTO_AEAD
38 select CRYPTO_BLKCIPHER
39 help
40 This IV generator generates an IV based on a sequence number by
41 xoring it with a salt. This algorithm is mainly useful for CTR
42 and similar modes.
43
39config CRYPTO_HASH 44config CRYPTO_HASH
40 tristate 45 tristate
41 select CRYPTO_ALGAPI 46 select CRYPTO_ALGAPI
@@ -91,7 +96,7 @@ config CRYPTO_SHA1
91 SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2). 96 SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2).
92 97
93config CRYPTO_SHA256 98config CRYPTO_SHA256
94 tristate "SHA256 digest algorithm" 99 tristate "SHA224 and SHA256 digest algorithm"
95 select CRYPTO_ALGAPI 100 select CRYPTO_ALGAPI
96 help 101 help
97 SHA256 secure hash standard (DFIPS 180-2). 102 SHA256 secure hash standard (DFIPS 180-2).
@@ -99,6 +104,9 @@ config CRYPTO_SHA256
99 This version of SHA implements a 256 bit hash with 128 bits of 104 This version of SHA implements a 256 bit hash with 128 bits of
100 security against collision attacks. 105 security against collision attacks.
101 106
107 This code also includes SHA-224, a 224 bit hash with 112 bits
108 of security against collision attacks.
109
102config CRYPTO_SHA512 110config CRYPTO_SHA512
103 tristate "SHA384 and SHA512 digest algorithms" 111 tristate "SHA384 and SHA512 digest algorithms"
104 select CRYPTO_ALGAPI 112 select CRYPTO_ALGAPI
@@ -195,9 +203,34 @@ config CRYPTO_XTS
195 key size 256, 384 or 512 bits. This implementation currently 203 key size 256, 384 or 512 bits. This implementation currently
196 can't handle a sectorsize which is not a multiple of 16 bytes. 204 can't handle a sectorsize which is not a multiple of 16 bytes.
197 205
206config CRYPTO_CTR
207 tristate "CTR support"
208 select CRYPTO_BLKCIPHER
209 select CRYPTO_SEQIV
210 select CRYPTO_MANAGER
211 help
212 CTR: Counter mode
213 This block cipher algorithm is required for IPSec.
214
215config CRYPTO_GCM
216 tristate "GCM/GMAC support"
217 select CRYPTO_CTR
218 select CRYPTO_AEAD
219 select CRYPTO_GF128MUL
220 help
221 Support for Galois/Counter Mode (GCM) and Galois Message
222 Authentication Code (GMAC). Required for IPSec.
223
224config CRYPTO_CCM
225 tristate "CCM support"
226 select CRYPTO_CTR
227 select CRYPTO_AEAD
228 help
229 Support for Counter with CBC MAC. Required for IPsec.
230
198config CRYPTO_CRYPTD 231config CRYPTO_CRYPTD
199 tristate "Software async crypto daemon" 232 tristate "Software async crypto daemon"
200 select CRYPTO_ABLKCIPHER 233 select CRYPTO_BLKCIPHER
201 select CRYPTO_MANAGER 234 select CRYPTO_MANAGER
202 help 235 help
203 This is a generic software asynchronous crypto daemon that 236 This is a generic software asynchronous crypto daemon that
@@ -320,6 +353,7 @@ config CRYPTO_AES_586
320 tristate "AES cipher algorithms (i586)" 353 tristate "AES cipher algorithms (i586)"
321 depends on (X86 || UML_X86) && !64BIT 354 depends on (X86 || UML_X86) && !64BIT
322 select CRYPTO_ALGAPI 355 select CRYPTO_ALGAPI
356 select CRYPTO_AES
323 help 357 help
324 AES cipher algorithms (FIPS-197). AES uses the Rijndael 358 AES cipher algorithms (FIPS-197). AES uses the Rijndael
325 algorithm. 359 algorithm.
@@ -341,6 +375,7 @@ config CRYPTO_AES_X86_64
341 tristate "AES cipher algorithms (x86_64)" 375 tristate "AES cipher algorithms (x86_64)"
342 depends on (X86 || UML_X86) && 64BIT 376 depends on (X86 || UML_X86) && 64BIT
343 select CRYPTO_ALGAPI 377 select CRYPTO_ALGAPI
378 select CRYPTO_AES
344 help 379 help
345 AES cipher algorithms (FIPS-197). AES uses the Rijndael 380 AES cipher algorithms (FIPS-197). AES uses the Rijndael
346 algorithm. 381 algorithm.
@@ -441,6 +476,46 @@ config CRYPTO_SEED
441 See also: 476 See also:
442 <http://www.kisa.or.kr/kisa/seed/jsp/seed_eng.jsp> 477 <http://www.kisa.or.kr/kisa/seed/jsp/seed_eng.jsp>
443 478
479config CRYPTO_SALSA20
480 tristate "Salsa20 stream cipher algorithm (EXPERIMENTAL)"
481 depends on EXPERIMENTAL
482 select CRYPTO_BLKCIPHER
483 help
484 Salsa20 stream cipher algorithm.
485
486 Salsa20 is a stream cipher submitted to eSTREAM, the ECRYPT
487 Stream Cipher Project. See <http://www.ecrypt.eu.org/stream/>
488
489 The Salsa20 stream cipher algorithm is designed by Daniel J.
490 Bernstein <djb@cr.yp.to>. See <http://cr.yp.to/snuffle.html>
491
492config CRYPTO_SALSA20_586
493 tristate "Salsa20 stream cipher algorithm (i586) (EXPERIMENTAL)"
494 depends on (X86 || UML_X86) && !64BIT
495 depends on EXPERIMENTAL
496 select CRYPTO_BLKCIPHER
497 help
498 Salsa20 stream cipher algorithm.
499
500 Salsa20 is a stream cipher submitted to eSTREAM, the ECRYPT
501 Stream Cipher Project. See <http://www.ecrypt.eu.org/stream/>
502
503 The Salsa20 stream cipher algorithm is designed by Daniel J.
504 Bernstein <djb@cr.yp.to>. See <http://cr.yp.to/snuffle.html>
505
506config CRYPTO_SALSA20_X86_64
507 tristate "Salsa20 stream cipher algorithm (x86_64) (EXPERIMENTAL)"
508 depends on (X86 || UML_X86) && 64BIT
509 depends on EXPERIMENTAL
510 select CRYPTO_BLKCIPHER
511 help
512 Salsa20 stream cipher algorithm.
513
514 Salsa20 is a stream cipher submitted to eSTREAM, the ECRYPT
515 Stream Cipher Project. See <http://www.ecrypt.eu.org/stream/>
516
517 The Salsa20 stream cipher algorithm is designed by Daniel J.
518 Bernstein <djb@cr.yp.to>. See <http://cr.yp.to/snuffle.html>
444 519
445config CRYPTO_DEFLATE 520config CRYPTO_DEFLATE
446 tristate "Deflate compression algorithm" 521 tristate "Deflate compression algorithm"
@@ -491,6 +566,7 @@ config CRYPTO_TEST
491 tristate "Testing module" 566 tristate "Testing module"
492 depends on m 567 depends on m
493 select CRYPTO_ALGAPI 568 select CRYPTO_ALGAPI
569 select CRYPTO_AEAD
494 help 570 help
495 Quick & dirty crypto test module. 571 Quick & dirty crypto test module.
496 572
@@ -498,10 +574,19 @@ config CRYPTO_AUTHENC
498 tristate "Authenc support" 574 tristate "Authenc support"
499 select CRYPTO_AEAD 575 select CRYPTO_AEAD
500 select CRYPTO_MANAGER 576 select CRYPTO_MANAGER
577 select CRYPTO_HASH
501 help 578 help
502 Authenc: Combined mode wrapper for IPsec. 579 Authenc: Combined mode wrapper for IPsec.
503 This is required for IPSec. 580 This is required for IPSec.
504 581
582config CRYPTO_LZO
583 tristate "LZO compression algorithm"
584 select CRYPTO_ALGAPI
585 select LZO_COMPRESS
586 select LZO_DECOMPRESS
587 help
588 This is the LZO algorithm.
589
505source "drivers/crypto/Kconfig" 590source "drivers/crypto/Kconfig"
506 591
507endif # if CRYPTO 592endif # if CRYPTO
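
The block of new Kconfig entries above (CTR, GCM/GMAC, CCM, the Salsa20 variants and LZO) is what makes the corresponding templates and algorithms selectable at build time. As a rough, hedged illustration of what they expose to kernel-side users once enabled, a minimal probe using the long-standing crypto_has_alg() helper; the algorithm names are assumed to match the new drivers' cra_name strings and are not taken from this patch:

#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/crypto.h>

/* Illustrative only: report which of the newly added algorithms can be
 * instantiated on the running kernel. */
static int __init probe_new_algs(void)
{
	static const char * const names[] = {
		"ctr(aes)", "gcm(aes)", "ccm(aes)", "salsa20", "lzo",
	};
	int i;

	for (i = 0; i < ARRAY_SIZE(names); i++)
		printk(KERN_INFO "%s: %s\n", names[i],
		       crypto_has_alg(names[i], 0, 0) ?
		       "available" : "missing");
	return 0;
}
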
diff --git a/crypto/Makefile b/crypto/Makefile
index 43c2a0dc9936..48c758379954 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -8,9 +8,14 @@ crypto_algapi-$(CONFIG_PROC_FS) += proc.o
8crypto_algapi-objs := algapi.o scatterwalk.o $(crypto_algapi-y) 8crypto_algapi-objs := algapi.o scatterwalk.o $(crypto_algapi-y)
9obj-$(CONFIG_CRYPTO_ALGAPI) += crypto_algapi.o 9obj-$(CONFIG_CRYPTO_ALGAPI) += crypto_algapi.o
10 10
11obj-$(CONFIG_CRYPTO_ABLKCIPHER) += ablkcipher.o
12obj-$(CONFIG_CRYPTO_AEAD) += aead.o 11obj-$(CONFIG_CRYPTO_AEAD) += aead.o
13obj-$(CONFIG_CRYPTO_BLKCIPHER) += blkcipher.o 12
13crypto_blkcipher-objs := ablkcipher.o
14crypto_blkcipher-objs += blkcipher.o
15obj-$(CONFIG_CRYPTO_BLKCIPHER) += crypto_blkcipher.o
16obj-$(CONFIG_CRYPTO_BLKCIPHER) += chainiv.o
17obj-$(CONFIG_CRYPTO_BLKCIPHER) += eseqiv.o
18obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o
14 19
15crypto_hash-objs := hash.o 20crypto_hash-objs := hash.o
16obj-$(CONFIG_CRYPTO_HASH) += crypto_hash.o 21obj-$(CONFIG_CRYPTO_HASH) += crypto_hash.o
@@ -32,6 +37,9 @@ obj-$(CONFIG_CRYPTO_CBC) += cbc.o
32obj-$(CONFIG_CRYPTO_PCBC) += pcbc.o 37obj-$(CONFIG_CRYPTO_PCBC) += pcbc.o
33obj-$(CONFIG_CRYPTO_LRW) += lrw.o 38obj-$(CONFIG_CRYPTO_LRW) += lrw.o
34obj-$(CONFIG_CRYPTO_XTS) += xts.o 39obj-$(CONFIG_CRYPTO_XTS) += xts.o
40obj-$(CONFIG_CRYPTO_CTR) += ctr.o
41obj-$(CONFIG_CRYPTO_GCM) += gcm.o
42obj-$(CONFIG_CRYPTO_CCM) += ccm.o
35obj-$(CONFIG_CRYPTO_CRYPTD) += cryptd.o 43obj-$(CONFIG_CRYPTO_CRYPTD) += cryptd.o
36obj-$(CONFIG_CRYPTO_DES) += des_generic.o 44obj-$(CONFIG_CRYPTO_DES) += des_generic.o
37obj-$(CONFIG_CRYPTO_FCRYPT) += fcrypt.o 45obj-$(CONFIG_CRYPTO_FCRYPT) += fcrypt.o
@@ -48,10 +56,12 @@ obj-$(CONFIG_CRYPTO_TEA) += tea.o
48obj-$(CONFIG_CRYPTO_KHAZAD) += khazad.o 56obj-$(CONFIG_CRYPTO_KHAZAD) += khazad.o
49obj-$(CONFIG_CRYPTO_ANUBIS) += anubis.o 57obj-$(CONFIG_CRYPTO_ANUBIS) += anubis.o
50obj-$(CONFIG_CRYPTO_SEED) += seed.o 58obj-$(CONFIG_CRYPTO_SEED) += seed.o
59obj-$(CONFIG_CRYPTO_SALSA20) += salsa20_generic.o
51obj-$(CONFIG_CRYPTO_DEFLATE) += deflate.o 60obj-$(CONFIG_CRYPTO_DEFLATE) += deflate.o
52obj-$(CONFIG_CRYPTO_MICHAEL_MIC) += michael_mic.o 61obj-$(CONFIG_CRYPTO_MICHAEL_MIC) += michael_mic.o
53obj-$(CONFIG_CRYPTO_CRC32C) += crc32c.o 62obj-$(CONFIG_CRYPTO_CRC32C) += crc32c.o
54obj-$(CONFIG_CRYPTO_AUTHENC) += authenc.o 63obj-$(CONFIG_CRYPTO_AUTHENC) += authenc.o
64obj-$(CONFIG_CRYPTO_LZO) += lzo.o
55 65
56obj-$(CONFIG_CRYPTO_TEST) += tcrypt.o 66obj-$(CONFIG_CRYPTO_TEST) += tcrypt.o
57 67
diff --git a/crypto/ablkcipher.c b/crypto/ablkcipher.c
index 2731acb86e7d..3bcb099b4a85 100644
--- a/crypto/ablkcipher.c
+++ b/crypto/ablkcipher.c
@@ -13,14 +13,18 @@
13 * 13 *
14 */ 14 */
15 15
16#include <crypto/algapi.h> 16#include <crypto/internal/skcipher.h>
17#include <linux/errno.h> 17#include <linux/err.h>
18#include <linux/init.h> 18#include <linux/init.h>
19#include <linux/kernel.h> 19#include <linux/kernel.h>
20#include <linux/module.h> 20#include <linux/module.h>
21#include <linux/rtnetlink.h>
22#include <linux/sched.h>
21#include <linux/slab.h> 23#include <linux/slab.h>
22#include <linux/seq_file.h> 24#include <linux/seq_file.h>
23 25
26#include "internal.h"
27
24static int setkey_unaligned(struct crypto_ablkcipher *tfm, const u8 *key, 28static int setkey_unaligned(struct crypto_ablkcipher *tfm, const u8 *key,
25 unsigned int keylen) 29 unsigned int keylen)
26{ 30{
@@ -66,6 +70,16 @@ static unsigned int crypto_ablkcipher_ctxsize(struct crypto_alg *alg, u32 type,
66 return alg->cra_ctxsize; 70 return alg->cra_ctxsize;
67} 71}
68 72
73int skcipher_null_givencrypt(struct skcipher_givcrypt_request *req)
74{
75 return crypto_ablkcipher_encrypt(&req->creq);
76}
77
78int skcipher_null_givdecrypt(struct skcipher_givcrypt_request *req)
79{
80 return crypto_ablkcipher_decrypt(&req->creq);
81}
82
69static int crypto_init_ablkcipher_ops(struct crypto_tfm *tfm, u32 type, 83static int crypto_init_ablkcipher_ops(struct crypto_tfm *tfm, u32 type,
70 u32 mask) 84 u32 mask)
71{ 85{
@@ -78,6 +92,11 @@ static int crypto_init_ablkcipher_ops(struct crypto_tfm *tfm, u32 type,
78 crt->setkey = setkey; 92 crt->setkey = setkey;
79 crt->encrypt = alg->encrypt; 93 crt->encrypt = alg->encrypt;
80 crt->decrypt = alg->decrypt; 94 crt->decrypt = alg->decrypt;
95 if (!alg->ivsize) {
96 crt->givencrypt = skcipher_null_givencrypt;
97 crt->givdecrypt = skcipher_null_givdecrypt;
98 }
99 crt->base = __crypto_ablkcipher_cast(tfm);
81 crt->ivsize = alg->ivsize; 100 crt->ivsize = alg->ivsize;
82 101
83 return 0; 102 return 0;
@@ -90,10 +109,13 @@ static void crypto_ablkcipher_show(struct seq_file *m, struct crypto_alg *alg)
90 struct ablkcipher_alg *ablkcipher = &alg->cra_ablkcipher; 109 struct ablkcipher_alg *ablkcipher = &alg->cra_ablkcipher;
91 110
92 seq_printf(m, "type : ablkcipher\n"); 111 seq_printf(m, "type : ablkcipher\n");
112 seq_printf(m, "async : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
113 "yes" : "no");
93 seq_printf(m, "blocksize : %u\n", alg->cra_blocksize); 114 seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
94 seq_printf(m, "min keysize : %u\n", ablkcipher->min_keysize); 115 seq_printf(m, "min keysize : %u\n", ablkcipher->min_keysize);
95 seq_printf(m, "max keysize : %u\n", ablkcipher->max_keysize); 116 seq_printf(m, "max keysize : %u\n", ablkcipher->max_keysize);
96 seq_printf(m, "ivsize : %u\n", ablkcipher->ivsize); 117 seq_printf(m, "ivsize : %u\n", ablkcipher->ivsize);
118 seq_printf(m, "geniv : %s\n", ablkcipher->geniv ?: "<default>");
97} 119}
98 120
99const struct crypto_type crypto_ablkcipher_type = { 121const struct crypto_type crypto_ablkcipher_type = {
@@ -105,5 +127,220 @@ const struct crypto_type crypto_ablkcipher_type = {
105}; 127};
106EXPORT_SYMBOL_GPL(crypto_ablkcipher_type); 128EXPORT_SYMBOL_GPL(crypto_ablkcipher_type);
107 129
130static int no_givdecrypt(struct skcipher_givcrypt_request *req)
131{
132 return -ENOSYS;
133}
134
135static int crypto_init_givcipher_ops(struct crypto_tfm *tfm, u32 type,
136 u32 mask)
137{
138 struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher;
139 struct ablkcipher_tfm *crt = &tfm->crt_ablkcipher;
140
141 if (alg->ivsize > PAGE_SIZE / 8)
142 return -EINVAL;
143
144 crt->setkey = tfm->__crt_alg->cra_flags & CRYPTO_ALG_GENIV ?
145 alg->setkey : setkey;
146 crt->encrypt = alg->encrypt;
147 crt->decrypt = alg->decrypt;
148 crt->givencrypt = alg->givencrypt;
149 crt->givdecrypt = alg->givdecrypt ?: no_givdecrypt;
150 crt->base = __crypto_ablkcipher_cast(tfm);
151 crt->ivsize = alg->ivsize;
152
153 return 0;
154}
155
156static void crypto_givcipher_show(struct seq_file *m, struct crypto_alg *alg)
157 __attribute__ ((unused));
158static void crypto_givcipher_show(struct seq_file *m, struct crypto_alg *alg)
159{
160 struct ablkcipher_alg *ablkcipher = &alg->cra_ablkcipher;
161
162 seq_printf(m, "type : givcipher\n");
163 seq_printf(m, "async : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
164 "yes" : "no");
165 seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
166 seq_printf(m, "min keysize : %u\n", ablkcipher->min_keysize);
167 seq_printf(m, "max keysize : %u\n", ablkcipher->max_keysize);
168 seq_printf(m, "ivsize : %u\n", ablkcipher->ivsize);
169 seq_printf(m, "geniv : %s\n", ablkcipher->geniv ?: "<built-in>");
170}
171
172const struct crypto_type crypto_givcipher_type = {
173 .ctxsize = crypto_ablkcipher_ctxsize,
174 .init = crypto_init_givcipher_ops,
175#ifdef CONFIG_PROC_FS
176 .show = crypto_givcipher_show,
177#endif
178};
179EXPORT_SYMBOL_GPL(crypto_givcipher_type);
180
181const char *crypto_default_geniv(const struct crypto_alg *alg)
182{
183 return alg->cra_flags & CRYPTO_ALG_ASYNC ? "eseqiv" : "chainiv";
184}
185
186static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask)
187{
188 struct rtattr *tb[3];
189 struct {
190 struct rtattr attr;
191 struct crypto_attr_type data;
192 } ptype;
193 struct {
194 struct rtattr attr;
195 struct crypto_attr_alg data;
196 } palg;
197 struct crypto_template *tmpl;
198 struct crypto_instance *inst;
199 struct crypto_alg *larval;
200 const char *geniv;
201 int err;
202
203 larval = crypto_larval_lookup(alg->cra_driver_name,
204 CRYPTO_ALG_TYPE_GIVCIPHER,
205 CRYPTO_ALG_TYPE_MASK);
206 err = PTR_ERR(larval);
207 if (IS_ERR(larval))
208 goto out;
209
210 err = -EAGAIN;
211 if (!crypto_is_larval(larval))
212 goto drop_larval;
213
214 ptype.attr.rta_len = sizeof(ptype);
215 ptype.attr.rta_type = CRYPTOA_TYPE;
216 ptype.data.type = type | CRYPTO_ALG_GENIV;
217 /* GENIV tells the template that we're making a default geniv. */
218 ptype.data.mask = mask | CRYPTO_ALG_GENIV;
219 tb[0] = &ptype.attr;
220
221 palg.attr.rta_len = sizeof(palg);
222 palg.attr.rta_type = CRYPTOA_ALG;
223 /* Must use the exact name to locate ourselves. */
224 memcpy(palg.data.name, alg->cra_driver_name, CRYPTO_MAX_ALG_NAME);
225 tb[1] = &palg.attr;
226
227 tb[2] = NULL;
228
229 if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
230 CRYPTO_ALG_TYPE_BLKCIPHER)
231 geniv = alg->cra_blkcipher.geniv;
232 else
233 geniv = alg->cra_ablkcipher.geniv;
234
235 if (!geniv)
236 geniv = crypto_default_geniv(alg);
237
238 tmpl = crypto_lookup_template(geniv);
239 err = -ENOENT;
240 if (!tmpl)
241 goto kill_larval;
242
243 inst = tmpl->alloc(tb);
244 err = PTR_ERR(inst);
245 if (IS_ERR(inst))
246 goto put_tmpl;
247
248 if ((err = crypto_register_instance(tmpl, inst))) {
249 tmpl->free(inst);
250 goto put_tmpl;
251 }
252
253 /* Redo the lookup to use the instance we just registered. */
254 err = -EAGAIN;
255
256put_tmpl:
257 crypto_tmpl_put(tmpl);
258kill_larval:
259 crypto_larval_kill(larval);
260drop_larval:
261 crypto_mod_put(larval);
262out:
263 crypto_mod_put(alg);
264 return err;
265}
266
267static struct crypto_alg *crypto_lookup_skcipher(const char *name, u32 type,
268 u32 mask)
269{
270 struct crypto_alg *alg;
271
272 alg = crypto_alg_mod_lookup(name, type, mask);
273 if (IS_ERR(alg))
274 return alg;
275
276 if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
277 CRYPTO_ALG_TYPE_GIVCIPHER)
278 return alg;
279
280 if (!((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
281 CRYPTO_ALG_TYPE_BLKCIPHER ? alg->cra_blkcipher.ivsize :
282 alg->cra_ablkcipher.ivsize))
283 return alg;
284
285 return ERR_PTR(crypto_givcipher_default(alg, type, mask));
286}
287
288int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
289 u32 type, u32 mask)
290{
291 struct crypto_alg *alg;
292 int err;
293
294 type = crypto_skcipher_type(type);
295 mask = crypto_skcipher_mask(mask);
296
297 alg = crypto_lookup_skcipher(name, type, mask);
298 if (IS_ERR(alg))
299 return PTR_ERR(alg);
300
301 err = crypto_init_spawn(&spawn->base, alg, spawn->base.inst, mask);
302 crypto_mod_put(alg);
303 return err;
304}
305EXPORT_SYMBOL_GPL(crypto_grab_skcipher);
306
307struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name,
308 u32 type, u32 mask)
309{
310 struct crypto_tfm *tfm;
311 int err;
312
313 type = crypto_skcipher_type(type);
314 mask = crypto_skcipher_mask(mask);
315
316 for (;;) {
317 struct crypto_alg *alg;
318
319 alg = crypto_lookup_skcipher(alg_name, type, mask);
320 if (IS_ERR(alg)) {
321 err = PTR_ERR(alg);
322 goto err;
323 }
324
325 tfm = __crypto_alloc_tfm(alg, type, mask);
326 if (!IS_ERR(tfm))
327 return __crypto_ablkcipher_cast(tfm);
328
329 crypto_mod_put(alg);
330 err = PTR_ERR(tfm);
331
332err:
333 if (err != -EAGAIN)
334 break;
335 if (signal_pending(current)) {
336 err = -EINTR;
337 break;
338 }
339 }
340
341 return ERR_PTR(err);
342}
343EXPORT_SYMBOL_GPL(crypto_alloc_ablkcipher);
344
108MODULE_LICENSE("GPL"); 345MODULE_LICENSE("GPL");
109MODULE_DESCRIPTION("Asynchronous block chaining cipher type"); 346MODULE_DESCRIPTION("Asynchronous block chaining cipher type");
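
Most of the new ablkcipher.c code is the "givcipher" plumbing: crypto_lookup_skcipher() notices when a blkcipher/ablkcipher with a non-zero ivsize has no IV-generator wrapper yet and instantiates its default geniv template ("chainiv" for synchronous, "eseqiv" for asynchronous algorithms) before handing the result to crypto_grab_skcipher() or crypto_alloc_ablkcipher(). From a consumer's point of view the interface is unchanged; a hedged sketch of that consumer side, synchronous path only, with -EINPROGRESS completion handling omitted:

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Illustrative in-place CBC-AES encryption of one contiguous buffer. */
static int encrypt_buffer(u8 *buf, unsigned int len,
			  const u8 *key, unsigned int keylen, u8 *iv)
{
	struct crypto_ablkcipher *tfm;
	struct ablkcipher_request *req;
	struct scatterlist sg;
	int err;

	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_ablkcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, buf, len);
	ablkcipher_request_set_crypt(req, &sg, &sg, len, iv);
	err = crypto_ablkcipher_encrypt(req);	/* may be -EINPROGRESS if async */

	ablkcipher_request_free(req);
out_free_tfm:
	crypto_free_ablkcipher(tfm);
	return err;
}
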
diff --git a/crypto/aead.c b/crypto/aead.c
index 84a3501fb478..3a6f3f52c7c7 100644
--- a/crypto/aead.c
+++ b/crypto/aead.c
@@ -12,14 +12,17 @@
12 * 12 *
13 */ 13 */
14 14
15#include <crypto/algapi.h> 15#include <crypto/internal/aead.h>
16#include <linux/errno.h> 16#include <linux/err.h>
17#include <linux/init.h> 17#include <linux/init.h>
18#include <linux/kernel.h> 18#include <linux/kernel.h>
19#include <linux/module.h> 19#include <linux/module.h>
20#include <linux/rtnetlink.h>
20#include <linux/slab.h> 21#include <linux/slab.h>
21#include <linux/seq_file.h> 22#include <linux/seq_file.h>
22 23
24#include "internal.h"
25
23static int setkey_unaligned(struct crypto_aead *tfm, const u8 *key, 26static int setkey_unaligned(struct crypto_aead *tfm, const u8 *key,
24 unsigned int keylen) 27 unsigned int keylen)
25{ 28{
@@ -53,25 +56,54 @@ static int setkey(struct crypto_aead *tfm, const u8 *key, unsigned int keylen)
53 return aead->setkey(tfm, key, keylen); 56 return aead->setkey(tfm, key, keylen);
54} 57}
55 58
59int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
60{
61 struct aead_tfm *crt = crypto_aead_crt(tfm);
62 int err;
63
64 if (authsize > crypto_aead_alg(tfm)->maxauthsize)
65 return -EINVAL;
66
67 if (crypto_aead_alg(tfm)->setauthsize) {
68 err = crypto_aead_alg(tfm)->setauthsize(crt->base, authsize);
69 if (err)
70 return err;
71 }
72
73 crypto_aead_crt(crt->base)->authsize = authsize;
74 crt->authsize = authsize;
75 return 0;
76}
77EXPORT_SYMBOL_GPL(crypto_aead_setauthsize);
78
56static unsigned int crypto_aead_ctxsize(struct crypto_alg *alg, u32 type, 79static unsigned int crypto_aead_ctxsize(struct crypto_alg *alg, u32 type,
57 u32 mask) 80 u32 mask)
58{ 81{
59 return alg->cra_ctxsize; 82 return alg->cra_ctxsize;
60} 83}
61 84
85static int no_givcrypt(struct aead_givcrypt_request *req)
86{
87 return -ENOSYS;
88}
89
62static int crypto_init_aead_ops(struct crypto_tfm *tfm, u32 type, u32 mask) 90static int crypto_init_aead_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
63{ 91{
64 struct aead_alg *alg = &tfm->__crt_alg->cra_aead; 92 struct aead_alg *alg = &tfm->__crt_alg->cra_aead;
65 struct aead_tfm *crt = &tfm->crt_aead; 93 struct aead_tfm *crt = &tfm->crt_aead;
66 94
67 if (max(alg->authsize, alg->ivsize) > PAGE_SIZE / 8) 95 if (max(alg->maxauthsize, alg->ivsize) > PAGE_SIZE / 8)
68 return -EINVAL; 96 return -EINVAL;
69 97
70 crt->setkey = setkey; 98 crt->setkey = tfm->__crt_alg->cra_flags & CRYPTO_ALG_GENIV ?
99 alg->setkey : setkey;
71 crt->encrypt = alg->encrypt; 100 crt->encrypt = alg->encrypt;
72 crt->decrypt = alg->decrypt; 101 crt->decrypt = alg->decrypt;
102 crt->givencrypt = alg->givencrypt ?: no_givcrypt;
103 crt->givdecrypt = alg->givdecrypt ?: no_givcrypt;
104 crt->base = __crypto_aead_cast(tfm);
73 crt->ivsize = alg->ivsize; 105 crt->ivsize = alg->ivsize;
74 crt->authsize = alg->authsize; 106 crt->authsize = alg->maxauthsize;
75 107
76 return 0; 108 return 0;
77} 109}
@@ -83,9 +115,12 @@ static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
83 struct aead_alg *aead = &alg->cra_aead; 115 struct aead_alg *aead = &alg->cra_aead;
84 116
85 seq_printf(m, "type : aead\n"); 117 seq_printf(m, "type : aead\n");
118 seq_printf(m, "async : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
119 "yes" : "no");
86 seq_printf(m, "blocksize : %u\n", alg->cra_blocksize); 120 seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
87 seq_printf(m, "ivsize : %u\n", aead->ivsize); 121 seq_printf(m, "ivsize : %u\n", aead->ivsize);
88 seq_printf(m, "authsize : %u\n", aead->authsize); 122 seq_printf(m, "maxauthsize : %u\n", aead->maxauthsize);
123 seq_printf(m, "geniv : %s\n", aead->geniv ?: "<built-in>");
89} 124}
90 125
91const struct crypto_type crypto_aead_type = { 126const struct crypto_type crypto_aead_type = {
@@ -97,5 +132,358 @@ const struct crypto_type crypto_aead_type = {
97}; 132};
98EXPORT_SYMBOL_GPL(crypto_aead_type); 133EXPORT_SYMBOL_GPL(crypto_aead_type);
99 134
135static int aead_null_givencrypt(struct aead_givcrypt_request *req)
136{
137 return crypto_aead_encrypt(&req->areq);
138}
139
140static int aead_null_givdecrypt(struct aead_givcrypt_request *req)
141{
142 return crypto_aead_decrypt(&req->areq);
143}
144
145static int crypto_init_nivaead_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
146{
147 struct aead_alg *alg = &tfm->__crt_alg->cra_aead;
148 struct aead_tfm *crt = &tfm->crt_aead;
149
150 if (max(alg->maxauthsize, alg->ivsize) > PAGE_SIZE / 8)
151 return -EINVAL;
152
153 crt->setkey = setkey;
154 crt->encrypt = alg->encrypt;
155 crt->decrypt = alg->decrypt;
156 if (!alg->ivsize) {
157 crt->givencrypt = aead_null_givencrypt;
158 crt->givdecrypt = aead_null_givdecrypt;
159 }
160 crt->base = __crypto_aead_cast(tfm);
161 crt->ivsize = alg->ivsize;
162 crt->authsize = alg->maxauthsize;
163
164 return 0;
165}
166
167static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg)
168 __attribute__ ((unused));
169static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg)
170{
171 struct aead_alg *aead = &alg->cra_aead;
172
173 seq_printf(m, "type : nivaead\n");
174 seq_printf(m, "async : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
175 "yes" : "no");
176 seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
177 seq_printf(m, "ivsize : %u\n", aead->ivsize);
178 seq_printf(m, "maxauthsize : %u\n", aead->maxauthsize);
179 seq_printf(m, "geniv : %s\n", aead->geniv);
180}
181
182const struct crypto_type crypto_nivaead_type = {
183 .ctxsize = crypto_aead_ctxsize,
184 .init = crypto_init_nivaead_ops,
185#ifdef CONFIG_PROC_FS
186 .show = crypto_nivaead_show,
187#endif
188};
189EXPORT_SYMBOL_GPL(crypto_nivaead_type);
190
191static int crypto_grab_nivaead(struct crypto_aead_spawn *spawn,
192 const char *name, u32 type, u32 mask)
193{
194 struct crypto_alg *alg;
195 int err;
196
197 type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
198 type |= CRYPTO_ALG_TYPE_AEAD;
199 mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV;
200
201 alg = crypto_alg_mod_lookup(name, type, mask);
202 if (IS_ERR(alg))
203 return PTR_ERR(alg);
204
205 err = crypto_init_spawn(&spawn->base, alg, spawn->base.inst, mask);
206 crypto_mod_put(alg);
207 return err;
208}
209
210struct crypto_instance *aead_geniv_alloc(struct crypto_template *tmpl,
211 struct rtattr **tb, u32 type,
212 u32 mask)
213{
214 const char *name;
215 struct crypto_aead_spawn *spawn;
216 struct crypto_attr_type *algt;
217 struct crypto_instance *inst;
218 struct crypto_alg *alg;
219 int err;
220
221 algt = crypto_get_attr_type(tb);
222 err = PTR_ERR(algt);
223 if (IS_ERR(algt))
224 return ERR_PTR(err);
225
226 if ((algt->type ^ (CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV)) &
227 algt->mask)
228 return ERR_PTR(-EINVAL);
229
230 name = crypto_attr_alg_name(tb[1]);
231 err = PTR_ERR(name);
232 if (IS_ERR(name))
233 return ERR_PTR(err);
234
235 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
236 if (!inst)
237 return ERR_PTR(-ENOMEM);
238
239 spawn = crypto_instance_ctx(inst);
240
241 /* Ignore async algorithms if necessary. */
242 mask |= crypto_requires_sync(algt->type, algt->mask);
243
244 crypto_set_aead_spawn(spawn, inst);
245 err = crypto_grab_nivaead(spawn, name, type, mask);
246 if (err)
247 goto err_free_inst;
248
249 alg = crypto_aead_spawn_alg(spawn);
250
251 err = -EINVAL;
252 if (!alg->cra_aead.ivsize)
253 goto err_drop_alg;
254
255 /*
256 * This is only true if we're constructing an algorithm with its
257 * default IV generator. For the default generator we elide the
258 * template name and double-check the IV generator.
259 */
260 if (algt->mask & CRYPTO_ALG_GENIV) {
261 if (strcmp(tmpl->name, alg->cra_aead.geniv))
262 goto err_drop_alg;
263
264 memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
265 memcpy(inst->alg.cra_driver_name, alg->cra_driver_name,
266 CRYPTO_MAX_ALG_NAME);
267 } else {
268 err = -ENAMETOOLONG;
269 if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
270 "%s(%s)", tmpl->name, alg->cra_name) >=
271 CRYPTO_MAX_ALG_NAME)
272 goto err_drop_alg;
273 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
274 "%s(%s)", tmpl->name, alg->cra_driver_name) >=
275 CRYPTO_MAX_ALG_NAME)
276 goto err_drop_alg;
277 }
278
279 inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV;
280 inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
281 inst->alg.cra_priority = alg->cra_priority;
282 inst->alg.cra_blocksize = alg->cra_blocksize;
283 inst->alg.cra_alignmask = alg->cra_alignmask;
284 inst->alg.cra_type = &crypto_aead_type;
285
286 inst->alg.cra_aead.ivsize = alg->cra_aead.ivsize;
287 inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize;
288 inst->alg.cra_aead.geniv = alg->cra_aead.geniv;
289
290 inst->alg.cra_aead.setkey = alg->cra_aead.setkey;
291 inst->alg.cra_aead.setauthsize = alg->cra_aead.setauthsize;
292 inst->alg.cra_aead.encrypt = alg->cra_aead.encrypt;
293 inst->alg.cra_aead.decrypt = alg->cra_aead.decrypt;
294
295out:
296 return inst;
297
298err_drop_alg:
299 crypto_drop_aead(spawn);
300err_free_inst:
301 kfree(inst);
302 inst = ERR_PTR(err);
303 goto out;
304}
305EXPORT_SYMBOL_GPL(aead_geniv_alloc);
306
307void aead_geniv_free(struct crypto_instance *inst)
308{
309 crypto_drop_aead(crypto_instance_ctx(inst));
310 kfree(inst);
311}
312EXPORT_SYMBOL_GPL(aead_geniv_free);
313
314int aead_geniv_init(struct crypto_tfm *tfm)
315{
316 struct crypto_instance *inst = (void *)tfm->__crt_alg;
317 struct crypto_aead *aead;
318
319 aead = crypto_spawn_aead(crypto_instance_ctx(inst));
320 if (IS_ERR(aead))
321 return PTR_ERR(aead);
322
323 tfm->crt_aead.base = aead;
324 tfm->crt_aead.reqsize += crypto_aead_reqsize(aead);
325
326 return 0;
327}
328EXPORT_SYMBOL_GPL(aead_geniv_init);
329
330void aead_geniv_exit(struct crypto_tfm *tfm)
331{
332 crypto_free_aead(tfm->crt_aead.base);
333}
334EXPORT_SYMBOL_GPL(aead_geniv_exit);
335
336static int crypto_nivaead_default(struct crypto_alg *alg, u32 type, u32 mask)
337{
338 struct rtattr *tb[3];
339 struct {
340 struct rtattr attr;
341 struct crypto_attr_type data;
342 } ptype;
343 struct {
344 struct rtattr attr;
345 struct crypto_attr_alg data;
346 } palg;
347 struct crypto_template *tmpl;
348 struct crypto_instance *inst;
349 struct crypto_alg *larval;
350 const char *geniv;
351 int err;
352
353 larval = crypto_larval_lookup(alg->cra_driver_name,
354 CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_GENIV,
355 CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
356 err = PTR_ERR(larval);
357 if (IS_ERR(larval))
358 goto out;
359
360 err = -EAGAIN;
361 if (!crypto_is_larval(larval))
362 goto drop_larval;
363
364 ptype.attr.rta_len = sizeof(ptype);
365 ptype.attr.rta_type = CRYPTOA_TYPE;
366 ptype.data.type = type | CRYPTO_ALG_GENIV;
367 /* GENIV tells the template that we're making a default geniv. */
368 ptype.data.mask = mask | CRYPTO_ALG_GENIV;
369 tb[0] = &ptype.attr;
370
371 palg.attr.rta_len = sizeof(palg);
372 palg.attr.rta_type = CRYPTOA_ALG;
373 /* Must use the exact name to locate ourselves. */
374 memcpy(palg.data.name, alg->cra_driver_name, CRYPTO_MAX_ALG_NAME);
375 tb[1] = &palg.attr;
376
377 tb[2] = NULL;
378
379 geniv = alg->cra_aead.geniv;
380
381 tmpl = crypto_lookup_template(geniv);
382 err = -ENOENT;
383 if (!tmpl)
384 goto kill_larval;
385
386 inst = tmpl->alloc(tb);
387 err = PTR_ERR(inst);
388 if (IS_ERR(inst))
389 goto put_tmpl;
390
391 if ((err = crypto_register_instance(tmpl, inst))) {
392 tmpl->free(inst);
393 goto put_tmpl;
394 }
395
396 /* Redo the lookup to use the instance we just registered. */
397 err = -EAGAIN;
398
399put_tmpl:
400 crypto_tmpl_put(tmpl);
401kill_larval:
402 crypto_larval_kill(larval);
403drop_larval:
404 crypto_mod_put(larval);
405out:
406 crypto_mod_put(alg);
407 return err;
408}
409
410static struct crypto_alg *crypto_lookup_aead(const char *name, u32 type,
411 u32 mask)
412{
413 struct crypto_alg *alg;
414
415 alg = crypto_alg_mod_lookup(name, type, mask);
416 if (IS_ERR(alg))
417 return alg;
418
419 if (alg->cra_type == &crypto_aead_type)
420 return alg;
421
422 if (!alg->cra_aead.ivsize)
423 return alg;
424
425 return ERR_PTR(crypto_nivaead_default(alg, type, mask));
426}
427
428int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
429 u32 type, u32 mask)
430{
431 struct crypto_alg *alg;
432 int err;
433
434 type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
435 type |= CRYPTO_ALG_TYPE_AEAD;
436 mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
437 mask |= CRYPTO_ALG_TYPE_MASK;
438
439 alg = crypto_lookup_aead(name, type, mask);
440 if (IS_ERR(alg))
441 return PTR_ERR(alg);
442
443 err = crypto_init_spawn(&spawn->base, alg, spawn->base.inst, mask);
444 crypto_mod_put(alg);
445 return err;
446}
447EXPORT_SYMBOL_GPL(crypto_grab_aead);
448
449struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask)
450{
451 struct crypto_tfm *tfm;
452 int err;
453
454 type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
455 type |= CRYPTO_ALG_TYPE_AEAD;
456 mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
457 mask |= CRYPTO_ALG_TYPE_MASK;
458
459 for (;;) {
460 struct crypto_alg *alg;
461
462 alg = crypto_lookup_aead(alg_name, type, mask);
463 if (IS_ERR(alg)) {
464 err = PTR_ERR(alg);
465 goto err;
466 }
467
468 tfm = __crypto_alloc_tfm(alg, type, mask);
469 if (!IS_ERR(tfm))
470 return __crypto_aead_cast(tfm);
471
472 crypto_mod_put(alg);
473 err = PTR_ERR(tfm);
474
475err:
476 if (err != -EAGAIN)
477 break;
478 if (signal_pending(current)) {
479 err = -EINTR;
480 break;
481 }
482 }
483
484 return ERR_PTR(err);
485}
486EXPORT_SYMBOL_GPL(crypto_alloc_aead);
487
100MODULE_LICENSE("GPL"); 488MODULE_LICENSE("GPL");
101MODULE_DESCRIPTION("Authenticated Encryption with Associated Data (AEAD)"); 489MODULE_DESCRIPTION("Authenticated Encryption with Associated Data (AEAD)");
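
aead.c grows the same machinery on the AEAD side: a separate "nivaead" crypto_type for algorithms that still need an IV generator, aead_geniv_alloc()/aead_geniv_free() for the geniv templates themselves, a tunable authentication-tag size via crypto_aead_setauthsize(), and crypto_alloc_aead()/crypto_grab_aead() that instantiate the default geniv on demand. A hedged sketch of the resulting consumer-side calls, with error paths trimmed and names as in the mainline API of this era:

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Illustrative AEAD encrypt: dst must leave room for the tag. */
static int aead_seal(struct scatterlist *assoc, unsigned int assoclen,
		     struct scatterlist *sg, unsigned int cryptlen,
		     const u8 *key, unsigned int keylen, u8 *iv)
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	int err;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_aead_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	err = crypto_aead_setauthsize(tfm, 16);	/* full 16-byte tag */
	if (err)
		goto out_free_tfm;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	aead_request_set_assoc(req, assoc, assoclen);
	aead_request_set_crypt(req, sg, sg, cryptlen, iv);
	err = crypto_aead_encrypt(req);		/* may be -EINPROGRESS if async */

	aead_request_free(req);
out_free_tfm:
	crypto_free_aead(tfm);
	return err;
}
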
diff --git a/crypto/aes_generic.c b/crypto/aes_generic.c
index 9401dca85e87..cf30af74480f 100644
--- a/crypto/aes_generic.c
+++ b/crypto/aes_generic.c
@@ -47,11 +47,7 @@
47 * --------------------------------------------------------------------------- 47 * ---------------------------------------------------------------------------
48 */ 48 */
49 49
50/* Some changes from the Gladman version: 50#include <crypto/aes.h>
51 s/RIJNDAEL(e_key)/E_KEY/g
52 s/RIJNDAEL(d_key)/D_KEY/g
53*/
54
55#include <linux/module.h> 51#include <linux/module.h>
56#include <linux/init.h> 52#include <linux/init.h>
57#include <linux/types.h> 53#include <linux/types.h>
@@ -59,88 +55,46 @@
59#include <linux/crypto.h> 55#include <linux/crypto.h>
60#include <asm/byteorder.h> 56#include <asm/byteorder.h>
61 57
62#define AES_MIN_KEY_SIZE 16 58static inline u8 byte(const u32 x, const unsigned n)
63#define AES_MAX_KEY_SIZE 32
64
65#define AES_BLOCK_SIZE 16
66
67/*
68 * #define byte(x, nr) ((unsigned char)((x) >> (nr*8)))
69 */
70static inline u8
71byte(const u32 x, const unsigned n)
72{ 59{
73 return x >> (n << 3); 60 return x >> (n << 3);
74} 61}
75 62
76struct aes_ctx {
77 int key_length;
78 u32 buf[120];
79};
80
81#define E_KEY (&ctx->buf[0])
82#define D_KEY (&ctx->buf[60])
83
84static u8 pow_tab[256] __initdata; 63static u8 pow_tab[256] __initdata;
85static u8 log_tab[256] __initdata; 64static u8 log_tab[256] __initdata;
86static u8 sbx_tab[256] __initdata; 65static u8 sbx_tab[256] __initdata;
87static u8 isb_tab[256] __initdata; 66static u8 isb_tab[256] __initdata;
88static u32 rco_tab[10]; 67static u32 rco_tab[10];
89static u32 ft_tab[4][256];
90static u32 it_tab[4][256];
91 68
92static u32 fl_tab[4][256]; 69u32 crypto_ft_tab[4][256];
93static u32 il_tab[4][256]; 70u32 crypto_fl_tab[4][256];
71u32 crypto_it_tab[4][256];
72u32 crypto_il_tab[4][256];
94 73
95static inline u8 __init 74EXPORT_SYMBOL_GPL(crypto_ft_tab);
96f_mult (u8 a, u8 b) 75EXPORT_SYMBOL_GPL(crypto_fl_tab);
76EXPORT_SYMBOL_GPL(crypto_it_tab);
77EXPORT_SYMBOL_GPL(crypto_il_tab);
78
79static inline u8 __init f_mult(u8 a, u8 b)
97{ 80{
98 u8 aa = log_tab[a], cc = aa + log_tab[b]; 81 u8 aa = log_tab[a], cc = aa + log_tab[b];
99 82
100 return pow_tab[cc + (cc < aa ? 1 : 0)]; 83 return pow_tab[cc + (cc < aa ? 1 : 0)];
101} 84}
102 85
103#define ff_mult(a,b) (a && b ? f_mult(a, b) : 0) 86#define ff_mult(a, b) (a && b ? f_mult(a, b) : 0)
104 87
105#define f_rn(bo, bi, n, k) \ 88static void __init gen_tabs(void)
106 bo[n] = ft_tab[0][byte(bi[n],0)] ^ \
107 ft_tab[1][byte(bi[(n + 1) & 3],1)] ^ \
108 ft_tab[2][byte(bi[(n + 2) & 3],2)] ^ \
109 ft_tab[3][byte(bi[(n + 3) & 3],3)] ^ *(k + n)
110
111#define i_rn(bo, bi, n, k) \
112 bo[n] = it_tab[0][byte(bi[n],0)] ^ \
113 it_tab[1][byte(bi[(n + 3) & 3],1)] ^ \
114 it_tab[2][byte(bi[(n + 2) & 3],2)] ^ \
115 it_tab[3][byte(bi[(n + 1) & 3],3)] ^ *(k + n)
116
117#define ls_box(x) \
118 ( fl_tab[0][byte(x, 0)] ^ \
119 fl_tab[1][byte(x, 1)] ^ \
120 fl_tab[2][byte(x, 2)] ^ \
121 fl_tab[3][byte(x, 3)] )
122
123#define f_rl(bo, bi, n, k) \
124 bo[n] = fl_tab[0][byte(bi[n],0)] ^ \
125 fl_tab[1][byte(bi[(n + 1) & 3],1)] ^ \
126 fl_tab[2][byte(bi[(n + 2) & 3],2)] ^ \
127 fl_tab[3][byte(bi[(n + 3) & 3],3)] ^ *(k + n)
128
129#define i_rl(bo, bi, n, k) \
130 bo[n] = il_tab[0][byte(bi[n],0)] ^ \
131 il_tab[1][byte(bi[(n + 3) & 3],1)] ^ \
132 il_tab[2][byte(bi[(n + 2) & 3],2)] ^ \
133 il_tab[3][byte(bi[(n + 1) & 3],3)] ^ *(k + n)
134
135static void __init
136gen_tabs (void)
137{ 89{
138 u32 i, t; 90 u32 i, t;
139 u8 p, q; 91 u8 p, q;
140 92
141 /* log and power tables for GF(2**8) finite field with 93 /*
142 0x011b as modular polynomial - the simplest primitive 94 * log and power tables for GF(2**8) finite field with
143 root is 0x03, used here to generate the tables */ 95 * 0x011b as modular polynomial - the simplest primitive
96 * root is 0x03, used here to generate the tables
97 */
144 98
145 for (i = 0, p = 1; i < 256; ++i) { 99 for (i = 0, p = 1; i < 256; ++i) {
146 pow_tab[i] = (u8) p; 100 pow_tab[i] = (u8) p;
@@ -169,92 +123,119 @@ gen_tabs (void)
169 p = sbx_tab[i]; 123 p = sbx_tab[i];
170 124
171 t = p; 125 t = p;
172 fl_tab[0][i] = t; 126 crypto_fl_tab[0][i] = t;
173 fl_tab[1][i] = rol32(t, 8); 127 crypto_fl_tab[1][i] = rol32(t, 8);
174 fl_tab[2][i] = rol32(t, 16); 128 crypto_fl_tab[2][i] = rol32(t, 16);
175 fl_tab[3][i] = rol32(t, 24); 129 crypto_fl_tab[3][i] = rol32(t, 24);
176 130
177 t = ((u32) ff_mult (2, p)) | 131 t = ((u32) ff_mult(2, p)) |
178 ((u32) p << 8) | 132 ((u32) p << 8) |
179 ((u32) p << 16) | ((u32) ff_mult (3, p) << 24); 133 ((u32) p << 16) | ((u32) ff_mult(3, p) << 24);
180 134
181 ft_tab[0][i] = t; 135 crypto_ft_tab[0][i] = t;
182 ft_tab[1][i] = rol32(t, 8); 136 crypto_ft_tab[1][i] = rol32(t, 8);
183 ft_tab[2][i] = rol32(t, 16); 137 crypto_ft_tab[2][i] = rol32(t, 16);
184 ft_tab[3][i] = rol32(t, 24); 138 crypto_ft_tab[3][i] = rol32(t, 24);
185 139
186 p = isb_tab[i]; 140 p = isb_tab[i];
187 141
188 t = p; 142 t = p;
189 il_tab[0][i] = t; 143 crypto_il_tab[0][i] = t;
190 il_tab[1][i] = rol32(t, 8); 144 crypto_il_tab[1][i] = rol32(t, 8);
191 il_tab[2][i] = rol32(t, 16); 145 crypto_il_tab[2][i] = rol32(t, 16);
192 il_tab[3][i] = rol32(t, 24); 146 crypto_il_tab[3][i] = rol32(t, 24);
193 147
194 t = ((u32) ff_mult (14, p)) | 148 t = ((u32) ff_mult(14, p)) |
195 ((u32) ff_mult (9, p) << 8) | 149 ((u32) ff_mult(9, p) << 8) |
196 ((u32) ff_mult (13, p) << 16) | 150 ((u32) ff_mult(13, p) << 16) |
197 ((u32) ff_mult (11, p) << 24); 151 ((u32) ff_mult(11, p) << 24);
198 152
199 it_tab[0][i] = t; 153 crypto_it_tab[0][i] = t;
200 it_tab[1][i] = rol32(t, 8); 154 crypto_it_tab[1][i] = rol32(t, 8);
201 it_tab[2][i] = rol32(t, 16); 155 crypto_it_tab[2][i] = rol32(t, 16);
202 it_tab[3][i] = rol32(t, 24); 156 crypto_it_tab[3][i] = rol32(t, 24);
203 } 157 }
204} 158}
205 159
206#define star_x(x) (((x) & 0x7f7f7f7f) << 1) ^ ((((x) & 0x80808080) >> 7) * 0x1b)
207
208#define imix_col(y,x) \
209 u = star_x(x); \
210 v = star_x(u); \
211 w = star_x(v); \
212 t = w ^ (x); \
213 (y) = u ^ v ^ w; \
214 (y) ^= ror32(u ^ t, 8) ^ \
215 ror32(v ^ t, 16) ^ \
216 ror32(t,24)
217
218/* initialise the key schedule from the user supplied key */ 160/* initialise the key schedule from the user supplied key */
219 161
220#define loop4(i) \ 162#define star_x(x) (((x) & 0x7f7f7f7f) << 1) ^ ((((x) & 0x80808080) >> 7) * 0x1b)
221{ t = ror32(t, 8); t = ls_box(t) ^ rco_tab[i]; \
222 t ^= E_KEY[4 * i]; E_KEY[4 * i + 4] = t; \
223 t ^= E_KEY[4 * i + 1]; E_KEY[4 * i + 5] = t; \
224 t ^= E_KEY[4 * i + 2]; E_KEY[4 * i + 6] = t; \
225 t ^= E_KEY[4 * i + 3]; E_KEY[4 * i + 7] = t; \
226}
227
228#define loop6(i) \
229{ t = ror32(t, 8); t = ls_box(t) ^ rco_tab[i]; \
230 t ^= E_KEY[6 * i]; E_KEY[6 * i + 6] = t; \
231 t ^= E_KEY[6 * i + 1]; E_KEY[6 * i + 7] = t; \
232 t ^= E_KEY[6 * i + 2]; E_KEY[6 * i + 8] = t; \
233 t ^= E_KEY[6 * i + 3]; E_KEY[6 * i + 9] = t; \
234 t ^= E_KEY[6 * i + 4]; E_KEY[6 * i + 10] = t; \
235 t ^= E_KEY[6 * i + 5]; E_KEY[6 * i + 11] = t; \
236}
237
238#define loop8(i) \
239{ t = ror32(t, 8); ; t = ls_box(t) ^ rco_tab[i]; \
240 t ^= E_KEY[8 * i]; E_KEY[8 * i + 8] = t; \
241 t ^= E_KEY[8 * i + 1]; E_KEY[8 * i + 9] = t; \
242 t ^= E_KEY[8 * i + 2]; E_KEY[8 * i + 10] = t; \
243 t ^= E_KEY[8 * i + 3]; E_KEY[8 * i + 11] = t; \
244 t = E_KEY[8 * i + 4] ^ ls_box(t); \
245 E_KEY[8 * i + 12] = t; \
246 t ^= E_KEY[8 * i + 5]; E_KEY[8 * i + 13] = t; \
247 t ^= E_KEY[8 * i + 6]; E_KEY[8 * i + 14] = t; \
248 t ^= E_KEY[8 * i + 7]; E_KEY[8 * i + 15] = t; \
249}
250 163
251static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, 164#define imix_col(y,x) do { \
252 unsigned int key_len) 165 u = star_x(x); \
166 v = star_x(u); \
167 w = star_x(v); \
168 t = w ^ (x); \
169 (y) = u ^ v ^ w; \
170 (y) ^= ror32(u ^ t, 8) ^ \
171 ror32(v ^ t, 16) ^ \
172 ror32(t, 24); \
173} while (0)
174
175#define ls_box(x) \
176 crypto_fl_tab[0][byte(x, 0)] ^ \
177 crypto_fl_tab[1][byte(x, 1)] ^ \
178 crypto_fl_tab[2][byte(x, 2)] ^ \
179 crypto_fl_tab[3][byte(x, 3)]
180
181#define loop4(i) do { \
182 t = ror32(t, 8); \
183 t = ls_box(t) ^ rco_tab[i]; \
184 t ^= ctx->key_enc[4 * i]; \
185 ctx->key_enc[4 * i + 4] = t; \
186 t ^= ctx->key_enc[4 * i + 1]; \
187 ctx->key_enc[4 * i + 5] = t; \
188 t ^= ctx->key_enc[4 * i + 2]; \
189 ctx->key_enc[4 * i + 6] = t; \
190 t ^= ctx->key_enc[4 * i + 3]; \
191 ctx->key_enc[4 * i + 7] = t; \
192} while (0)
193
194#define loop6(i) do { \
195 t = ror32(t, 8); \
196 t = ls_box(t) ^ rco_tab[i]; \
197 t ^= ctx->key_enc[6 * i]; \
198 ctx->key_enc[6 * i + 6] = t; \
199 t ^= ctx->key_enc[6 * i + 1]; \
200 ctx->key_enc[6 * i + 7] = t; \
201 t ^= ctx->key_enc[6 * i + 2]; \
202 ctx->key_enc[6 * i + 8] = t; \
203 t ^= ctx->key_enc[6 * i + 3]; \
204 ctx->key_enc[6 * i + 9] = t; \
205 t ^= ctx->key_enc[6 * i + 4]; \
206 ctx->key_enc[6 * i + 10] = t; \
207 t ^= ctx->key_enc[6 * i + 5]; \
208 ctx->key_enc[6 * i + 11] = t; \
209} while (0)
210
211#define loop8(i) do { \
212 t = ror32(t, 8); \
213 t = ls_box(t) ^ rco_tab[i]; \
214 t ^= ctx->key_enc[8 * i]; \
215 ctx->key_enc[8 * i + 8] = t; \
216 t ^= ctx->key_enc[8 * i + 1]; \
217 ctx->key_enc[8 * i + 9] = t; \
218 t ^= ctx->key_enc[8 * i + 2]; \
219 ctx->key_enc[8 * i + 10] = t; \
220 t ^= ctx->key_enc[8 * i + 3]; \
221 ctx->key_enc[8 * i + 11] = t; \
222 t = ctx->key_enc[8 * i + 4] ^ ls_box(t); \
223 ctx->key_enc[8 * i + 12] = t; \
224 t ^= ctx->key_enc[8 * i + 5]; \
225 ctx->key_enc[8 * i + 13] = t; \
226 t ^= ctx->key_enc[8 * i + 6]; \
227 ctx->key_enc[8 * i + 14] = t; \
228 t ^= ctx->key_enc[8 * i + 7]; \
229 ctx->key_enc[8 * i + 15] = t; \
230} while (0)
231
232int crypto_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
233 unsigned int key_len)
253{ 234{
254 struct aes_ctx *ctx = crypto_tfm_ctx(tfm); 235 struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
255 const __le32 *key = (const __le32 *)in_key; 236 const __le32 *key = (const __le32 *)in_key;
256 u32 *flags = &tfm->crt_flags; 237 u32 *flags = &tfm->crt_flags;
257 u32 i, t, u, v, w; 238 u32 i, t, u, v, w, j;
258 239
259 if (key_len % 8) { 240 if (key_len % 8) {
260 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; 241 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
@@ -263,95 +244,113 @@ static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
263 244
264 ctx->key_length = key_len; 245 ctx->key_length = key_len;
265 246
266 E_KEY[0] = le32_to_cpu(key[0]); 247 ctx->key_dec[key_len + 24] = ctx->key_enc[0] = le32_to_cpu(key[0]);
267 E_KEY[1] = le32_to_cpu(key[1]); 248 ctx->key_dec[key_len + 25] = ctx->key_enc[1] = le32_to_cpu(key[1]);
268 E_KEY[2] = le32_to_cpu(key[2]); 249 ctx->key_dec[key_len + 26] = ctx->key_enc[2] = le32_to_cpu(key[2]);
269 E_KEY[3] = le32_to_cpu(key[3]); 250 ctx->key_dec[key_len + 27] = ctx->key_enc[3] = le32_to_cpu(key[3]);
270 251
271 switch (key_len) { 252 switch (key_len) {
272 case 16: 253 case 16:
273 t = E_KEY[3]; 254 t = ctx->key_enc[3];
274 for (i = 0; i < 10; ++i) 255 for (i = 0; i < 10; ++i)
275 loop4 (i); 256 loop4(i);
276 break; 257 break;
277 258
278 case 24: 259 case 24:
279 E_KEY[4] = le32_to_cpu(key[4]); 260 ctx->key_enc[4] = le32_to_cpu(key[4]);
280 t = E_KEY[5] = le32_to_cpu(key[5]); 261 t = ctx->key_enc[5] = le32_to_cpu(key[5]);
281 for (i = 0; i < 8; ++i) 262 for (i = 0; i < 8; ++i)
282 loop6 (i); 263 loop6(i);
283 break; 264 break;
284 265
285 case 32: 266 case 32:
286 E_KEY[4] = le32_to_cpu(key[4]); 267 ctx->key_enc[4] = le32_to_cpu(key[4]);
287 E_KEY[5] = le32_to_cpu(key[5]); 268 ctx->key_enc[5] = le32_to_cpu(key[5]);
288 E_KEY[6] = le32_to_cpu(key[6]); 269 ctx->key_enc[6] = le32_to_cpu(key[6]);
289 t = E_KEY[7] = le32_to_cpu(key[7]); 270 t = ctx->key_enc[7] = le32_to_cpu(key[7]);
290 for (i = 0; i < 7; ++i) 271 for (i = 0; i < 7; ++i)
291 loop8 (i); 272 loop8(i);
292 break; 273 break;
293 } 274 }
294 275
295 D_KEY[0] = E_KEY[0]; 276 ctx->key_dec[0] = ctx->key_enc[key_len + 24];
296 D_KEY[1] = E_KEY[1]; 277 ctx->key_dec[1] = ctx->key_enc[key_len + 25];
297 D_KEY[2] = E_KEY[2]; 278 ctx->key_dec[2] = ctx->key_enc[key_len + 26];
298 D_KEY[3] = E_KEY[3]; 279 ctx->key_dec[3] = ctx->key_enc[key_len + 27];
299 280
300 for (i = 4; i < key_len + 24; ++i) { 281 for (i = 4; i < key_len + 24; ++i) {
301 imix_col (D_KEY[i], E_KEY[i]); 282 j = key_len + 24 - (i & ~3) + (i & 3);
283 imix_col(ctx->key_dec[j], ctx->key_enc[i]);
302 } 284 }
303
304 return 0; 285 return 0;
305} 286}
287EXPORT_SYMBOL_GPL(crypto_aes_set_key);
306 288
307/* encrypt a block of text */ 289/* encrypt a block of text */
308 290
309#define f_nround(bo, bi, k) \ 291#define f_rn(bo, bi, n, k) do { \
310 f_rn(bo, bi, 0, k); \ 292 bo[n] = crypto_ft_tab[0][byte(bi[n], 0)] ^ \
311 f_rn(bo, bi, 1, k); \ 293 crypto_ft_tab[1][byte(bi[(n + 1) & 3], 1)] ^ \
312 f_rn(bo, bi, 2, k); \ 294 crypto_ft_tab[2][byte(bi[(n + 2) & 3], 2)] ^ \
313 f_rn(bo, bi, 3, k); \ 295 crypto_ft_tab[3][byte(bi[(n + 3) & 3], 3)] ^ *(k + n); \
314 k += 4 296} while (0)
315 297
316#define f_lround(bo, bi, k) \ 298#define f_nround(bo, bi, k) do {\
317 f_rl(bo, bi, 0, k); \ 299 f_rn(bo, bi, 0, k); \
318 f_rl(bo, bi, 1, k); \ 300 f_rn(bo, bi, 1, k); \
319 f_rl(bo, bi, 2, k); \ 301 f_rn(bo, bi, 2, k); \
320 f_rl(bo, bi, 3, k) 302 f_rn(bo, bi, 3, k); \
303 k += 4; \
304} while (0)
305
306#define f_rl(bo, bi, n, k) do { \
307 bo[n] = crypto_fl_tab[0][byte(bi[n], 0)] ^ \
308 crypto_fl_tab[1][byte(bi[(n + 1) & 3], 1)] ^ \
309 crypto_fl_tab[2][byte(bi[(n + 2) & 3], 2)] ^ \
310 crypto_fl_tab[3][byte(bi[(n + 3) & 3], 3)] ^ *(k + n); \
311} while (0)
312
313#define f_lround(bo, bi, k) do {\
314 f_rl(bo, bi, 0, k); \
315 f_rl(bo, bi, 1, k); \
316 f_rl(bo, bi, 2, k); \
317 f_rl(bo, bi, 3, k); \
318} while (0)
321 319
322static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) 320static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
323{ 321{
324 const struct aes_ctx *ctx = crypto_tfm_ctx(tfm); 322 const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
325 const __le32 *src = (const __le32 *)in; 323 const __le32 *src = (const __le32 *)in;
326 __le32 *dst = (__le32 *)out; 324 __le32 *dst = (__le32 *)out;
327 u32 b0[4], b1[4]; 325 u32 b0[4], b1[4];
328 const u32 *kp = E_KEY + 4; 326 const u32 *kp = ctx->key_enc + 4;
327 const int key_len = ctx->key_length;
329 328
330 b0[0] = le32_to_cpu(src[0]) ^ E_KEY[0]; 329 b0[0] = le32_to_cpu(src[0]) ^ ctx->key_enc[0];
331 b0[1] = le32_to_cpu(src[1]) ^ E_KEY[1]; 330 b0[1] = le32_to_cpu(src[1]) ^ ctx->key_enc[1];
332 b0[2] = le32_to_cpu(src[2]) ^ E_KEY[2]; 331 b0[2] = le32_to_cpu(src[2]) ^ ctx->key_enc[2];
333 b0[3] = le32_to_cpu(src[3]) ^ E_KEY[3]; 332 b0[3] = le32_to_cpu(src[3]) ^ ctx->key_enc[3];
334 333
335 if (ctx->key_length > 24) { 334 if (key_len > 24) {
336 f_nround (b1, b0, kp); 335 f_nround(b1, b0, kp);
337 f_nround (b0, b1, kp); 336 f_nround(b0, b1, kp);
338 } 337 }
339 338
340 if (ctx->key_length > 16) { 339 if (key_len > 16) {
341 f_nround (b1, b0, kp); 340 f_nround(b1, b0, kp);
342 f_nround (b0, b1, kp); 341 f_nround(b0, b1, kp);
343 } 342 }
344 343
345 f_nround (b1, b0, kp); 344 f_nround(b1, b0, kp);
346 f_nround (b0, b1, kp); 345 f_nround(b0, b1, kp);
347 f_nround (b1, b0, kp); 346 f_nround(b1, b0, kp);
348 f_nround (b0, b1, kp); 347 f_nround(b0, b1, kp);
349 f_nround (b1, b0, kp); 348 f_nround(b1, b0, kp);
350 f_nround (b0, b1, kp); 349 f_nround(b0, b1, kp);
351 f_nround (b1, b0, kp); 350 f_nround(b1, b0, kp);
352 f_nround (b0, b1, kp); 351 f_nround(b0, b1, kp);
353 f_nround (b1, b0, kp); 352 f_nround(b1, b0, kp);
354 f_lround (b0, b1, kp); 353 f_lround(b0, b1, kp);
355 354
356 dst[0] = cpu_to_le32(b0[0]); 355 dst[0] = cpu_to_le32(b0[0]);
357 dst[1] = cpu_to_le32(b0[1]); 356 dst[1] = cpu_to_le32(b0[1]);
@@ -361,53 +360,69 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
361 360
362/* decrypt a block of text */ 361/* decrypt a block of text */
363 362
364#define i_nround(bo, bi, k) \ 363#define i_rn(bo, bi, n, k) do { \
365 i_rn(bo, bi, 0, k); \ 364 bo[n] = crypto_it_tab[0][byte(bi[n], 0)] ^ \
366 i_rn(bo, bi, 1, k); \ 365 crypto_it_tab[1][byte(bi[(n + 3) & 3], 1)] ^ \
367 i_rn(bo, bi, 2, k); \ 366 crypto_it_tab[2][byte(bi[(n + 2) & 3], 2)] ^ \
368 i_rn(bo, bi, 3, k); \ 367 crypto_it_tab[3][byte(bi[(n + 1) & 3], 3)] ^ *(k + n); \
369 k -= 4 368} while (0)
370 369
371#define i_lround(bo, bi, k) \ 370#define i_nround(bo, bi, k) do {\
372 i_rl(bo, bi, 0, k); \ 371 i_rn(bo, bi, 0, k); \
373 i_rl(bo, bi, 1, k); \ 372 i_rn(bo, bi, 1, k); \
374 i_rl(bo, bi, 2, k); \ 373 i_rn(bo, bi, 2, k); \
375 i_rl(bo, bi, 3, k) 374 i_rn(bo, bi, 3, k); \
375 k += 4; \
376} while (0)
377
378#define i_rl(bo, bi, n, k) do { \
379 bo[n] = crypto_il_tab[0][byte(bi[n], 0)] ^ \
380 crypto_il_tab[1][byte(bi[(n + 3) & 3], 1)] ^ \
381 crypto_il_tab[2][byte(bi[(n + 2) & 3], 2)] ^ \
382 crypto_il_tab[3][byte(bi[(n + 1) & 3], 3)] ^ *(k + n); \
383} while (0)
384
385#define i_lround(bo, bi, k) do {\
386 i_rl(bo, bi, 0, k); \
387 i_rl(bo, bi, 1, k); \
388 i_rl(bo, bi, 2, k); \
389 i_rl(bo, bi, 3, k); \
390} while (0)
376 391
377static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) 392static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
378{ 393{
379 const struct aes_ctx *ctx = crypto_tfm_ctx(tfm); 394 const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
380 const __le32 *src = (const __le32 *)in; 395 const __le32 *src = (const __le32 *)in;
381 __le32 *dst = (__le32 *)out; 396 __le32 *dst = (__le32 *)out;
382 u32 b0[4], b1[4]; 397 u32 b0[4], b1[4];
383 const int key_len = ctx->key_length; 398 const int key_len = ctx->key_length;
384 const u32 *kp = D_KEY + key_len + 20; 399 const u32 *kp = ctx->key_dec + 4;
385 400
386 b0[0] = le32_to_cpu(src[0]) ^ E_KEY[key_len + 24]; 401 b0[0] = le32_to_cpu(src[0]) ^ ctx->key_dec[0];
387 b0[1] = le32_to_cpu(src[1]) ^ E_KEY[key_len + 25]; 402 b0[1] = le32_to_cpu(src[1]) ^ ctx->key_dec[1];
388 b0[2] = le32_to_cpu(src[2]) ^ E_KEY[key_len + 26]; 403 b0[2] = le32_to_cpu(src[2]) ^ ctx->key_dec[2];
389 b0[3] = le32_to_cpu(src[3]) ^ E_KEY[key_len + 27]; 404 b0[3] = le32_to_cpu(src[3]) ^ ctx->key_dec[3];
390 405
391 if (key_len > 24) { 406 if (key_len > 24) {
392 i_nround (b1, b0, kp); 407 i_nround(b1, b0, kp);
393 i_nround (b0, b1, kp); 408 i_nround(b0, b1, kp);
394 } 409 }
395 410
396 if (key_len > 16) { 411 if (key_len > 16) {
397 i_nround (b1, b0, kp); 412 i_nround(b1, b0, kp);
398 i_nround (b0, b1, kp); 413 i_nround(b0, b1, kp);
399 } 414 }
400 415
401 i_nround (b1, b0, kp); 416 i_nround(b1, b0, kp);
402 i_nround (b0, b1, kp); 417 i_nround(b0, b1, kp);
403 i_nround (b1, b0, kp); 418 i_nround(b1, b0, kp);
404 i_nround (b0, b1, kp); 419 i_nround(b0, b1, kp);
405 i_nround (b1, b0, kp); 420 i_nround(b1, b0, kp);
406 i_nround (b0, b1, kp); 421 i_nround(b0, b1, kp);
407 i_nround (b1, b0, kp); 422 i_nround(b1, b0, kp);
408 i_nround (b0, b1, kp); 423 i_nround(b0, b1, kp);
409 i_nround (b1, b0, kp); 424 i_nround(b1, b0, kp);
410 i_lround (b0, b1, kp); 425 i_lround(b0, b1, kp);
411 426
412 dst[0] = cpu_to_le32(b0[0]); 427 dst[0] = cpu_to_le32(b0[0]);
413 dst[1] = cpu_to_le32(b0[1]); 428 dst[1] = cpu_to_le32(b0[1]);
@@ -415,14 +430,13 @@ static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
415 dst[3] = cpu_to_le32(b0[3]); 430 dst[3] = cpu_to_le32(b0[3]);
416} 431}
417 432
418
419static struct crypto_alg aes_alg = { 433static struct crypto_alg aes_alg = {
420 .cra_name = "aes", 434 .cra_name = "aes",
421 .cra_driver_name = "aes-generic", 435 .cra_driver_name = "aes-generic",
422 .cra_priority = 100, 436 .cra_priority = 100,
423 .cra_flags = CRYPTO_ALG_TYPE_CIPHER, 437 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
424 .cra_blocksize = AES_BLOCK_SIZE, 438 .cra_blocksize = AES_BLOCK_SIZE,
425 .cra_ctxsize = sizeof(struct aes_ctx), 439 .cra_ctxsize = sizeof(struct crypto_aes_ctx),
426 .cra_alignmask = 3, 440 .cra_alignmask = 3,
427 .cra_module = THIS_MODULE, 441 .cra_module = THIS_MODULE,
428 .cra_list = LIST_HEAD_INIT(aes_alg.cra_list), 442 .cra_list = LIST_HEAD_INIT(aes_alg.cra_list),
@@ -430,9 +444,9 @@ static struct crypto_alg aes_alg = {
430 .cipher = { 444 .cipher = {
431 .cia_min_keysize = AES_MIN_KEY_SIZE, 445 .cia_min_keysize = AES_MIN_KEY_SIZE,
432 .cia_max_keysize = AES_MAX_KEY_SIZE, 446 .cia_max_keysize = AES_MAX_KEY_SIZE,
433 .cia_setkey = aes_set_key, 447 .cia_setkey = crypto_aes_set_key,
434 .cia_encrypt = aes_encrypt, 448 .cia_encrypt = aes_encrypt,
435 .cia_decrypt = aes_decrypt 449 .cia_decrypt = aes_decrypt
436 } 450 }
437 } 451 }
438}; 452};
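
The aes_generic.c rework is largely mechanical, but the useful part is the new sharing surface: the four lookup tables and crypto_aes_set_key() are exported, and the context becomes struct crypto_aes_ctx with separate key_enc/key_dec schedules, so arch-specific implementations can reuse the generic key expansion instead of duplicating it. A hedged sketch of such reuse follows; the *_blk symbols and the driver name are made up for illustration and the real glue lives in arch/x86/crypto/aes_glue.c:

#include <crypto/aes.h>
#include <linux/crypto.h>
#include <linux/linkage.h>
#include <linux/module.h>

/* Hypothetical assembly primitives; only their prototypes matter here. */
asmlinkage void example_aes_enc_blk(struct crypto_aes_ctx *ctx, u8 *dst,
				    const u8 *src);
asmlinkage void example_aes_dec_blk(struct crypto_aes_ctx *ctx, u8 *dst,
				    const u8 *src);

static void aes_encrypt_glue(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	example_aes_enc_blk(crypto_tfm_ctx(tfm), dst, src);
}

static void aes_decrypt_glue(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	example_aes_dec_blk(crypto_tfm_ctx(tfm), dst, src);
}

static struct crypto_alg example_aes_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-asm-example",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(example_aes_alg.cra_list),
	.cra_u			= {
		.cipher = {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			/* generic key schedule exported by aes_generic.c */
			.cia_setkey		= crypto_aes_set_key,
			.cia_encrypt		= aes_encrypt_glue,
			.cia_decrypt		= aes_decrypt_glue,
		}
	}
};
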
diff --git a/crypto/algapi.c b/crypto/algapi.c
index 8383282de1dd..e65cb50cf4af 100644
--- a/crypto/algapi.c
+++ b/crypto/algapi.c
@@ -472,7 +472,7 @@ int crypto_check_attr_type(struct rtattr **tb, u32 type)
472} 472}
473EXPORT_SYMBOL_GPL(crypto_check_attr_type); 473EXPORT_SYMBOL_GPL(crypto_check_attr_type);
474 474
475struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask) 475const char *crypto_attr_alg_name(struct rtattr *rta)
476{ 476{
477 struct crypto_attr_alg *alga; 477 struct crypto_attr_alg *alga;
478 478
@@ -486,7 +486,21 @@ struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask)
486 alga = RTA_DATA(rta); 486 alga = RTA_DATA(rta);
487 alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0; 487 alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;
488 488
489 return crypto_alg_mod_lookup(alga->name, type, mask); 489 return alga->name;
490}
491EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
492
493struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask)
494{
495 const char *name;
496 int err;
497
498 name = crypto_attr_alg_name(rta);
499 err = PTR_ERR(name);
500 if (IS_ERR(name))
501 return ERR_PTR(err);
502
503 return crypto_alg_mod_lookup(name, type, mask);
490} 504}
491EXPORT_SYMBOL_GPL(crypto_attr_alg); 505EXPORT_SYMBOL_GPL(crypto_attr_alg);
492 506
@@ -605,6 +619,53 @@ int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm)
605} 619}
606EXPORT_SYMBOL_GPL(crypto_tfm_in_queue); 620EXPORT_SYMBOL_GPL(crypto_tfm_in_queue);
607 621
622static inline void crypto_inc_byte(u8 *a, unsigned int size)
623{
624 u8 *b = (a + size);
625 u8 c;
626
627 for (; size; size--) {
628 c = *--b + 1;
629 *b = c;
630 if (c)
631 break;
632 }
633}
634
635void crypto_inc(u8 *a, unsigned int size)
636{
637 __be32 *b = (__be32 *)(a + size);
638 u32 c;
639
640 for (; size >= 4; size -= 4) {
641 c = be32_to_cpu(*--b) + 1;
642 *b = cpu_to_be32(c);
643 if (c)
644 return;
645 }
646
647 crypto_inc_byte(a, size);
648}
649EXPORT_SYMBOL_GPL(crypto_inc);
650
651static inline void crypto_xor_byte(u8 *a, const u8 *b, unsigned int size)
652{
653 for (; size; size--)
654 *a++ ^= *b++;
655}
656
657void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
658{
659 u32 *a = (u32 *)dst;
660 u32 *b = (u32 *)src;
661
662 for (; size >= 4; size -= 4)
663 *a++ ^= *b++;
664
665 crypto_xor_byte((u8 *)a, (u8 *)b, size);
666}
667EXPORT_SYMBOL_GPL(crypto_xor);
668
608static int __init crypto_algapi_init(void) 669static int __init crypto_algapi_init(void)
609{ 670{
610 crypto_init_proc(); 671 crypto_init_proc();
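
crypto_inc() and crypto_xor() become shared library helpers here (declared in <crypto/algapi.h> by this series): crypto_xor() XORs two buffers a word at a time with a byte-wise tail, and crypto_inc() increments a buffer interpreted as one big-endian integer, which is exactly the counter-block update that CTR, CCM and GCM need. A hedged illustration of the intended use:

#include <crypto/algapi.h>

/*
 * Illustrative CTR-style step: XOR one block of keystream into the data,
 * then bump the big-endian counter block (for example ...000000ff becomes
 * ...00000100, carrying across bytes and words as needed).
 */
static void ctr_like_step(u8 *data, const u8 *keystream,
			  u8 *ctrblk, unsigned int bsize)
{
	crypto_xor(data, keystream, bsize);
	crypto_inc(ctrblk, bsize);
}
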
diff --git a/crypto/api.c b/crypto/api.c
index 1f5c72477356..a2496d1bc6d4 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -137,7 +137,7 @@ static struct crypto_alg *crypto_larval_alloc(const char *name, u32 type,
137 return alg; 137 return alg;
138} 138}
139 139
140static void crypto_larval_kill(struct crypto_alg *alg) 140void crypto_larval_kill(struct crypto_alg *alg)
141{ 141{
142 struct crypto_larval *larval = (void *)alg; 142 struct crypto_larval *larval = (void *)alg;
143 143
@@ -147,6 +147,7 @@ static void crypto_larval_kill(struct crypto_alg *alg)
147 complete_all(&larval->completion); 147 complete_all(&larval->completion);
148 crypto_alg_put(alg); 148 crypto_alg_put(alg);
149} 149}
150EXPORT_SYMBOL_GPL(crypto_larval_kill);
150 151
151static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg) 152static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
152{ 153{
@@ -176,11 +177,9 @@ static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
176 return alg; 177 return alg;
177} 178}
178 179
179struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask) 180struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
180{ 181{
181 struct crypto_alg *alg; 182 struct crypto_alg *alg;
182 struct crypto_alg *larval;
183 int ok;
184 183
185 if (!name) 184 if (!name)
186 return ERR_PTR(-ENOENT); 185 return ERR_PTR(-ENOENT);
@@ -193,7 +192,17 @@ struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
193 if (alg) 192 if (alg)
194 return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg; 193 return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;
195 194
196 larval = crypto_larval_alloc(name, type, mask); 195 return crypto_larval_alloc(name, type, mask);
196}
197EXPORT_SYMBOL_GPL(crypto_larval_lookup);
198
199struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
200{
201 struct crypto_alg *alg;
202 struct crypto_alg *larval;
203 int ok;
204
205 larval = crypto_larval_lookup(name, type, mask);
197 if (IS_ERR(larval) || !crypto_is_larval(larval)) 206 if (IS_ERR(larval) || !crypto_is_larval(larval))
198 return larval; 207 return larval;
199 208
diff --git a/crypto/authenc.c b/crypto/authenc.c
index 126a529b496d..ed8ac5a6fa5f 100644
--- a/crypto/authenc.c
+++ b/crypto/authenc.c
@@ -10,22 +10,21 @@
10 * 10 *
11 */ 11 */
12 12
13#include <crypto/algapi.h> 13#include <crypto/aead.h>
14#include <crypto/internal/skcipher.h>
15#include <crypto/authenc.h>
16#include <crypto/scatterwalk.h>
14#include <linux/err.h> 17#include <linux/err.h>
15#include <linux/init.h> 18#include <linux/init.h>
16#include <linux/kernel.h> 19#include <linux/kernel.h>
17#include <linux/module.h> 20#include <linux/module.h>
21#include <linux/rtnetlink.h>
18#include <linux/slab.h> 22#include <linux/slab.h>
19#include <linux/spinlock.h> 23#include <linux/spinlock.h>
20 24
21#include "scatterwalk.h"
22
23struct authenc_instance_ctx { 25struct authenc_instance_ctx {
24 struct crypto_spawn auth; 26 struct crypto_spawn auth;
25 struct crypto_spawn enc; 27 struct crypto_skcipher_spawn enc;
26
27 unsigned int authsize;
28 unsigned int enckeylen;
29}; 28};
30 29
31struct crypto_authenc_ctx { 30struct crypto_authenc_ctx {
@@ -37,19 +36,31 @@ struct crypto_authenc_ctx {
37static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key, 36static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
38 unsigned int keylen) 37 unsigned int keylen)
39{ 38{
40 struct authenc_instance_ctx *ictx =
41 crypto_instance_ctx(crypto_aead_alg_instance(authenc));
42 unsigned int enckeylen = ictx->enckeylen;
43 unsigned int authkeylen; 39 unsigned int authkeylen;
40 unsigned int enckeylen;
44 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc); 41 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
45 struct crypto_hash *auth = ctx->auth; 42 struct crypto_hash *auth = ctx->auth;
46 struct crypto_ablkcipher *enc = ctx->enc; 43 struct crypto_ablkcipher *enc = ctx->enc;
44 struct rtattr *rta = (void *)key;
45 struct crypto_authenc_key_param *param;
47 int err = -EINVAL; 46 int err = -EINVAL;
48 47
49 if (keylen < enckeylen) { 48 if (!RTA_OK(rta, keylen))
50 crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN); 49 goto badkey;
51 goto out; 50 if (rta->rta_type != CRYPTO_AUTHENC_KEYA_PARAM)
52 } 51 goto badkey;
52 if (RTA_PAYLOAD(rta) < sizeof(*param))
53 goto badkey;
54
55 param = RTA_DATA(rta);
56 enckeylen = be32_to_cpu(param->enckeylen);
57
58 key += RTA_ALIGN(rta->rta_len);
59 keylen -= RTA_ALIGN(rta->rta_len);
60
61 if (keylen < enckeylen)
62 goto badkey;
63
53 authkeylen = keylen - enckeylen; 64 authkeylen = keylen - enckeylen;
54 65
55 crypto_hash_clear_flags(auth, CRYPTO_TFM_REQ_MASK); 66 crypto_hash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
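
With this hunk the authenc() key format changes: the template instance no longer fixes enckeylen; instead the key blob itself starts with an rtattr of type CRYPTO_AUTHENC_KEYA_PARAM carrying a struct crypto_authenc_key_param (both added in include/crypto/authenc.h by this series), followed by the authentication key and then the encryption key. A hedged sketch of how a caller could pack such a blob, matching the layout parsed above; the helper name is made up:

#include <crypto/authenc.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <asm/byteorder.h>

/* Hypothetical helper: pack authkey || enckey into the authenc() format. */
static u8 *pack_authenc_key(const u8 *authkey, unsigned int authkeylen,
			    const u8 *enckey, unsigned int enckeylen,
			    unsigned int *keylen)
{
	struct crypto_authenc_key_param *param;
	struct rtattr *rta;
	u8 *key, *p;

	*keylen = RTA_SPACE(sizeof(*param)) + authkeylen + enckeylen;
	key = kmalloc(*keylen, GFP_KERNEL);
	if (!key)
		return NULL;

	rta = (struct rtattr *)key;
	rta->rta_type = CRYPTO_AUTHENC_KEYA_PARAM;
	rta->rta_len = RTA_LENGTH(sizeof(*param));
	param = RTA_DATA(rta);
	param->enckeylen = cpu_to_be32(enckeylen);

	p = key + RTA_SPACE(sizeof(*param));
	memcpy(p, authkey, authkeylen);
	memcpy(p + authkeylen, enckey, enckeylen);
	return key;
}
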
@@ -71,21 +82,38 @@ static int crypto_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
71 82
72out: 83out:
73 return err; 84 return err;
85
86badkey:
87 crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
88 goto out;
74} 89}
75 90
76static int crypto_authenc_hash(struct aead_request *req) 91static void authenc_chain(struct scatterlist *head, struct scatterlist *sg,
92 int chain)
93{
94 if (chain) {
95 head->length += sg->length;
96 sg = scatterwalk_sg_next(sg);
97 }
98
99 if (sg)
100 scatterwalk_sg_chain(head, 2, sg);
101 else
102 sg_mark_end(head);
103}
104
105static u8 *crypto_authenc_hash(struct aead_request *req, unsigned int flags,
106 struct scatterlist *cipher,
107 unsigned int cryptlen)
77{ 108{
78 struct crypto_aead *authenc = crypto_aead_reqtfm(req); 109 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
79 struct authenc_instance_ctx *ictx =
80 crypto_instance_ctx(crypto_aead_alg_instance(authenc));
81 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc); 110 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
82 struct crypto_hash *auth = ctx->auth; 111 struct crypto_hash *auth = ctx->auth;
83 struct hash_desc desc = { 112 struct hash_desc desc = {
84 .tfm = auth, 113 .tfm = auth,
114 .flags = aead_request_flags(req) & flags,
85 }; 115 };
86 u8 *hash = aead_request_ctx(req); 116 u8 *hash = aead_request_ctx(req);
87 struct scatterlist *dst = req->dst;
88 unsigned int cryptlen = req->cryptlen;
89 int err; 117 int err;
90 118
91 hash = (u8 *)ALIGN((unsigned long)hash + crypto_hash_alignmask(auth), 119 hash = (u8 *)ALIGN((unsigned long)hash + crypto_hash_alignmask(auth),
@@ -100,7 +128,7 @@ static int crypto_authenc_hash(struct aead_request *req)
100 if (err) 128 if (err)
101 goto auth_unlock; 129 goto auth_unlock;
102 130
103 err = crypto_hash_update(&desc, dst, cryptlen); 131 err = crypto_hash_update(&desc, cipher, cryptlen);
104 if (err) 132 if (err)
105 goto auth_unlock; 133 goto auth_unlock;
106 134
@@ -109,17 +137,53 @@ auth_unlock:
109 spin_unlock_bh(&ctx->auth_lock); 137 spin_unlock_bh(&ctx->auth_lock);
110 138
111 if (err) 139 if (err)
112 return err; 140 return ERR_PTR(err);
141
142 return hash;
143}
113 144
114 scatterwalk_map_and_copy(hash, dst, cryptlen, ictx->authsize, 1); 145static int crypto_authenc_genicv(struct aead_request *req, u8 *iv,
146 unsigned int flags)
147{
148 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
149 struct scatterlist *dst = req->dst;
150 struct scatterlist cipher[2];
151 struct page *dstp;
152 unsigned int ivsize = crypto_aead_ivsize(authenc);
153 unsigned int cryptlen;
154 u8 *vdst;
155 u8 *hash;
156
157 dstp = sg_page(dst);
158 vdst = PageHighMem(dstp) ? NULL : page_address(dstp) + dst->offset;
159
160 sg_init_table(cipher, 2);
161 sg_set_buf(cipher, iv, ivsize);
162 authenc_chain(cipher, dst, vdst == iv + ivsize);
163
164 cryptlen = req->cryptlen + ivsize;
165 hash = crypto_authenc_hash(req, flags, cipher, cryptlen);
166 if (IS_ERR(hash))
167 return PTR_ERR(hash);
168
169 scatterwalk_map_and_copy(hash, cipher, cryptlen,
170 crypto_aead_authsize(authenc), 1);
115 return 0; 171 return 0;
116} 172}
117 173
118static void crypto_authenc_encrypt_done(struct crypto_async_request *req, 174static void crypto_authenc_encrypt_done(struct crypto_async_request *req,
119 int err) 175 int err)
120{ 176{
121 if (!err) 177 if (!err) {
122 err = crypto_authenc_hash(req->data); 178 struct aead_request *areq = req->data;
179 struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
180 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
181 struct ablkcipher_request *abreq = aead_request_ctx(areq);
182 u8 *iv = (u8 *)(abreq + 1) +
183 crypto_ablkcipher_reqsize(ctx->enc);
184
185 err = crypto_authenc_genicv(areq, iv, 0);
186 }
123 187
124 aead_request_complete(req->data, err); 188 aead_request_complete(req->data, err);
125} 189}
@@ -129,72 +193,99 @@ static int crypto_authenc_encrypt(struct aead_request *req)
129 struct crypto_aead *authenc = crypto_aead_reqtfm(req); 193 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
130 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc); 194 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
131 struct ablkcipher_request *abreq = aead_request_ctx(req); 195 struct ablkcipher_request *abreq = aead_request_ctx(req);
196 struct crypto_ablkcipher *enc = ctx->enc;
197 struct scatterlist *dst = req->dst;
198 unsigned int cryptlen = req->cryptlen;
199 u8 *iv = (u8 *)(abreq + 1) + crypto_ablkcipher_reqsize(enc);
132 int err; 200 int err;
133 201
134 ablkcipher_request_set_tfm(abreq, ctx->enc); 202 ablkcipher_request_set_tfm(abreq, enc);
135 ablkcipher_request_set_callback(abreq, aead_request_flags(req), 203 ablkcipher_request_set_callback(abreq, aead_request_flags(req),
136 crypto_authenc_encrypt_done, req); 204 crypto_authenc_encrypt_done, req);
137 ablkcipher_request_set_crypt(abreq, req->src, req->dst, req->cryptlen, 205 ablkcipher_request_set_crypt(abreq, req->src, dst, cryptlen, req->iv);
138 req->iv); 206
207 memcpy(iv, req->iv, crypto_aead_ivsize(authenc));
139 208
140 err = crypto_ablkcipher_encrypt(abreq); 209 err = crypto_ablkcipher_encrypt(abreq);
141 if (err) 210 if (err)
142 return err; 211 return err;
143 212
144 return crypto_authenc_hash(req); 213 return crypto_authenc_genicv(req, iv, CRYPTO_TFM_REQ_MAY_SLEEP);
145} 214}
146 215
147static int crypto_authenc_verify(struct aead_request *req) 216static void crypto_authenc_givencrypt_done(struct crypto_async_request *req,
217 int err)
148{ 218{
149 struct crypto_aead *authenc = crypto_aead_reqtfm(req); 219 if (!err) {
150 struct authenc_instance_ctx *ictx = 220 struct aead_givcrypt_request *greq = req->data;
151 crypto_instance_ctx(crypto_aead_alg_instance(authenc)); 221
222 err = crypto_authenc_genicv(&greq->areq, greq->giv, 0);
223 }
224
225 aead_request_complete(req->data, err);
226}
227
228static int crypto_authenc_givencrypt(struct aead_givcrypt_request *req)
229{
230 struct crypto_aead *authenc = aead_givcrypt_reqtfm(req);
152 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc); 231 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
153 struct crypto_hash *auth = ctx->auth; 232 struct aead_request *areq = &req->areq;
154 struct hash_desc desc = { 233 struct skcipher_givcrypt_request *greq = aead_request_ctx(areq);
155 .tfm = auth, 234 u8 *iv = req->giv;
156 .flags = aead_request_flags(req),
157 };
158 u8 *ohash = aead_request_ctx(req);
159 u8 *ihash;
160 struct scatterlist *src = req->src;
161 unsigned int cryptlen = req->cryptlen;
162 unsigned int authsize;
163 int err; 235 int err;
164 236
165 ohash = (u8 *)ALIGN((unsigned long)ohash + crypto_hash_alignmask(auth), 237 skcipher_givcrypt_set_tfm(greq, ctx->enc);
166 crypto_hash_alignmask(auth) + 1); 238 skcipher_givcrypt_set_callback(greq, aead_request_flags(areq),
167 ihash = ohash + crypto_hash_digestsize(auth); 239 crypto_authenc_givencrypt_done, areq);
168 240 skcipher_givcrypt_set_crypt(greq, areq->src, areq->dst, areq->cryptlen,
169 spin_lock_bh(&ctx->auth_lock); 241 areq->iv);
170 err = crypto_hash_init(&desc); 242 skcipher_givcrypt_set_giv(greq, iv, req->seq);
171 if (err)
172 goto auth_unlock;
173 243
174 err = crypto_hash_update(&desc, req->assoc, req->assoclen); 244 err = crypto_skcipher_givencrypt(greq);
175 if (err) 245 if (err)
176 goto auth_unlock; 246 return err;
177 247
178 err = crypto_hash_update(&desc, src, cryptlen); 248 return crypto_authenc_genicv(areq, iv, CRYPTO_TFM_REQ_MAY_SLEEP);
179 if (err) 249}
180 goto auth_unlock;
181 250
182 err = crypto_hash_final(&desc, ohash); 251static int crypto_authenc_verify(struct aead_request *req,
183auth_unlock: 252 struct scatterlist *cipher,
184 spin_unlock_bh(&ctx->auth_lock); 253 unsigned int cryptlen)
254{
255 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
256 u8 *ohash;
257 u8 *ihash;
258 unsigned int authsize;
185 259
186 if (err) 260 ohash = crypto_authenc_hash(req, CRYPTO_TFM_REQ_MAY_SLEEP, cipher,
187 return err; 261 cryptlen);
262 if (IS_ERR(ohash))
263 return PTR_ERR(ohash);
188 264
189 authsize = ictx->authsize; 265 authsize = crypto_aead_authsize(authenc);
190 scatterwalk_map_and_copy(ihash, src, cryptlen, authsize, 0); 266 ihash = ohash + authsize;
191 return memcmp(ihash, ohash, authsize) ? -EINVAL : 0; 267 scatterwalk_map_and_copy(ihash, cipher, cryptlen, authsize, 0);
268 return memcmp(ihash, ohash, authsize) ? -EBADMSG: 0;
192} 269}
193 270
194static void crypto_authenc_decrypt_done(struct crypto_async_request *req, 271static int crypto_authenc_iverify(struct aead_request *req, u8 *iv,
195 int err) 272 unsigned int cryptlen)
196{ 273{
197 aead_request_complete(req->data, err); 274 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
275 struct scatterlist *src = req->src;
276 struct scatterlist cipher[2];
277 struct page *srcp;
278 unsigned int ivsize = crypto_aead_ivsize(authenc);
279 u8 *vsrc;
280
281 srcp = sg_page(src);
282 vsrc = PageHighMem(srcp) ? NULL : page_address(srcp) + src->offset;
283
284 sg_init_table(cipher, 2);
285 sg_set_buf(cipher, iv, ivsize);
286 authenc_chain(cipher, src, vsrc == iv + ivsize);
287
288 return crypto_authenc_verify(req, cipher, cryptlen + ivsize);
198} 289}
199 290
200static int crypto_authenc_decrypt(struct aead_request *req) 291static int crypto_authenc_decrypt(struct aead_request *req)
@@ -202,17 +293,23 @@ static int crypto_authenc_decrypt(struct aead_request *req)
202 struct crypto_aead *authenc = crypto_aead_reqtfm(req); 293 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
203 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc); 294 struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
204 struct ablkcipher_request *abreq = aead_request_ctx(req); 295 struct ablkcipher_request *abreq = aead_request_ctx(req);
296 unsigned int cryptlen = req->cryptlen;
297 unsigned int authsize = crypto_aead_authsize(authenc);
298 u8 *iv = req->iv;
205 int err; 299 int err;
206 300
207 err = crypto_authenc_verify(req); 301 if (cryptlen < authsize)
302 return -EINVAL;
303 cryptlen -= authsize;
304
305 err = crypto_authenc_iverify(req, iv, cryptlen);
208 if (err) 306 if (err)
209 return err; 307 return err;
210 308
211 ablkcipher_request_set_tfm(abreq, ctx->enc); 309 ablkcipher_request_set_tfm(abreq, ctx->enc);
212 ablkcipher_request_set_callback(abreq, aead_request_flags(req), 310 ablkcipher_request_set_callback(abreq, aead_request_flags(req),
213 crypto_authenc_decrypt_done, req); 311 req->base.complete, req->base.data);
214 ablkcipher_request_set_crypt(abreq, req->src, req->dst, req->cryptlen, 312 ablkcipher_request_set_crypt(abreq, req->src, req->dst, cryptlen, iv);
215 req->iv);
216 313
217 return crypto_ablkcipher_decrypt(abreq); 314 return crypto_ablkcipher_decrypt(abreq);
218} 315}
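
The decrypt path above now insists that req->cryptlen covers the ICV (cryptlen < authsize is rejected) and reports a tag mismatch as -EBADMSG rather than -EINVAL. As a caller-side illustration, here is a minimal synchronous sketch of driving an authenc() AEAD through a decrypt and reacting to the new error code. It is not taken from the patch: the "hmac(sha1),cbc(aes)" pairing, the 12-byte truncated ICV, the crypto_aead_setauthsize() call (per the run-time authsize rework in this series) and the helper name are illustrative assumptions, and a real user would keep the tfm around rather than allocate it per call.

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static int authenc_decrypt_one(const u8 *key, unsigned int keylen,
			       u8 *buf, unsigned int ctlen_with_icv,
			       u8 *assoc, unsigned int assoclen, u8 *iv)
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	struct scatterlist sg, asg;
	int err;

	/* Ask for a synchronous instance to keep the example simple. */
	tfm = crypto_alloc_aead("authenc(hmac(sha1),cbc(aes))", 0,
				CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* key is the rtattr-packed blob parsed by the new setkey above. */
	err = crypto_aead_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	/* Run-time ICV length, e.g. HMAC-SHA1 truncated to 12 bytes. */
	err = crypto_aead_setauthsize(tfm, 12);
	if (err)
		goto out_free_tfm;

	err = -ENOMEM;
	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		goto out_free_tfm;

	sg_init_one(&sg, buf, ctlen_with_icv);		/* ciphertext || ICV */
	sg_init_one(&asg, assoc, assoclen);

	aead_request_set_callback(req, 0, NULL, NULL);
	aead_request_set_assoc(req, &asg, assoclen);
	/* cryptlen includes the trailing ICV, as checked in the diff. */
	aead_request_set_crypt(req, &sg, &sg, ctlen_with_icv, iv);

	err = crypto_aead_decrypt(req);
	/* -EBADMSG now means the ICV did not verify: discard the data. */

	aead_request_free(req);
out_free_tfm:
	crypto_free_aead(tfm);
	return err;
}
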
@@ -224,19 +321,13 @@ static int crypto_authenc_init_tfm(struct crypto_tfm *tfm)
224 struct crypto_authenc_ctx *ctx = crypto_tfm_ctx(tfm); 321 struct crypto_authenc_ctx *ctx = crypto_tfm_ctx(tfm);
225 struct crypto_hash *auth; 322 struct crypto_hash *auth;
226 struct crypto_ablkcipher *enc; 323 struct crypto_ablkcipher *enc;
227 unsigned int digestsize;
228 int err; 324 int err;
229 325
230 auth = crypto_spawn_hash(&ictx->auth); 326 auth = crypto_spawn_hash(&ictx->auth);
231 if (IS_ERR(auth)) 327 if (IS_ERR(auth))
232 return PTR_ERR(auth); 328 return PTR_ERR(auth);
233 329
234 err = -EINVAL; 330 enc = crypto_spawn_skcipher(&ictx->enc);
235 digestsize = crypto_hash_digestsize(auth);
236 if (ictx->authsize > digestsize)
237 goto err_free_hash;
238
239 enc = crypto_spawn_ablkcipher(&ictx->enc);
240 err = PTR_ERR(enc); 331 err = PTR_ERR(enc);
241 if (IS_ERR(enc)) 332 if (IS_ERR(enc))
242 goto err_free_hash; 333 goto err_free_hash;
@@ -246,9 +337,10 @@ static int crypto_authenc_init_tfm(struct crypto_tfm *tfm)
246 tfm->crt_aead.reqsize = max_t(unsigned int, 337 tfm->crt_aead.reqsize = max_t(unsigned int,
247 (crypto_hash_alignmask(auth) & 338 (crypto_hash_alignmask(auth) &
248 ~(crypto_tfm_ctx_alignment() - 1)) + 339 ~(crypto_tfm_ctx_alignment() - 1)) +
249 digestsize * 2, 340 crypto_hash_digestsize(auth) * 2,
250 sizeof(struct ablkcipher_request) + 341 sizeof(struct skcipher_givcrypt_request) +
251 crypto_ablkcipher_reqsize(enc)); 342 crypto_ablkcipher_reqsize(enc) +
343 crypto_ablkcipher_ivsize(enc));
252 344
253 spin_lock_init(&ctx->auth_lock); 345 spin_lock_init(&ctx->auth_lock);
254 346
@@ -269,75 +361,74 @@ static void crypto_authenc_exit_tfm(struct crypto_tfm *tfm)
269 361
270static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb) 362static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
271{ 363{
364 struct crypto_attr_type *algt;
272 struct crypto_instance *inst; 365 struct crypto_instance *inst;
273 struct crypto_alg *auth; 366 struct crypto_alg *auth;
274 struct crypto_alg *enc; 367 struct crypto_alg *enc;
275 struct authenc_instance_ctx *ctx; 368 struct authenc_instance_ctx *ctx;
276 unsigned int authsize; 369 const char *enc_name;
277 unsigned int enckeylen;
278 int err; 370 int err;
279 371
280 err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD); 372 algt = crypto_get_attr_type(tb);
281 if (err) 373 err = PTR_ERR(algt);
374 if (IS_ERR(algt))
282 return ERR_PTR(err); 375 return ERR_PTR(err);
283 376
377 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
378 return ERR_PTR(-EINVAL);
379
284 auth = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH, 380 auth = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH,
285 CRYPTO_ALG_TYPE_HASH_MASK); 381 CRYPTO_ALG_TYPE_HASH_MASK);
286 if (IS_ERR(auth)) 382 if (IS_ERR(auth))
287 return ERR_PTR(PTR_ERR(auth)); 383 return ERR_PTR(PTR_ERR(auth));
288 384
289 err = crypto_attr_u32(tb[2], &authsize); 385 enc_name = crypto_attr_alg_name(tb[2]);
290 inst = ERR_PTR(err); 386 err = PTR_ERR(enc_name);
291 if (err) 387 if (IS_ERR(enc_name))
292 goto out_put_auth;
293
294 enc = crypto_attr_alg(tb[3], CRYPTO_ALG_TYPE_BLKCIPHER,
295 CRYPTO_ALG_TYPE_MASK);
296 inst = ERR_PTR(PTR_ERR(enc));
297 if (IS_ERR(enc))
298 goto out_put_auth; 388 goto out_put_auth;
299 389
300 err = crypto_attr_u32(tb[4], &enckeylen);
301 if (err)
302 goto out_put_enc;
303
304 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); 390 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
305 err = -ENOMEM; 391 err = -ENOMEM;
306 if (!inst) 392 if (!inst)
307 goto out_put_enc; 393 goto out_put_auth;
308
309 err = -ENAMETOOLONG;
310 if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
311 "authenc(%s,%u,%s,%u)", auth->cra_name, authsize,
312 enc->cra_name, enckeylen) >= CRYPTO_MAX_ALG_NAME)
313 goto err_free_inst;
314
315 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
316 "authenc(%s,%u,%s,%u)", auth->cra_driver_name,
317 authsize, enc->cra_driver_name, enckeylen) >=
318 CRYPTO_MAX_ALG_NAME)
319 goto err_free_inst;
320 394
321 ctx = crypto_instance_ctx(inst); 395 ctx = crypto_instance_ctx(inst);
322 ctx->authsize = authsize;
323 ctx->enckeylen = enckeylen;
324 396
325 err = crypto_init_spawn(&ctx->auth, auth, inst, CRYPTO_ALG_TYPE_MASK); 397 err = crypto_init_spawn(&ctx->auth, auth, inst, CRYPTO_ALG_TYPE_MASK);
326 if (err) 398 if (err)
327 goto err_free_inst; 399 goto err_free_inst;
328 400
329 err = crypto_init_spawn(&ctx->enc, enc, inst, CRYPTO_ALG_TYPE_MASK); 401 crypto_set_skcipher_spawn(&ctx->enc, inst);
402 err = crypto_grab_skcipher(&ctx->enc, enc_name, 0,
403 crypto_requires_sync(algt->type,
404 algt->mask));
330 if (err) 405 if (err)
331 goto err_drop_auth; 406 goto err_drop_auth;
332 407
333 inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC; 408 enc = crypto_skcipher_spawn_alg(&ctx->enc);
409
410 err = -ENAMETOOLONG;
411 if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
412 "authenc(%s,%s)", auth->cra_name, enc->cra_name) >=
413 CRYPTO_MAX_ALG_NAME)
414 goto err_drop_enc;
415
416 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
417 "authenc(%s,%s)", auth->cra_driver_name,
418 enc->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
419 goto err_drop_enc;
420
421 inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
422 inst->alg.cra_flags |= enc->cra_flags & CRYPTO_ALG_ASYNC;
334 inst->alg.cra_priority = enc->cra_priority * 10 + auth->cra_priority; 423 inst->alg.cra_priority = enc->cra_priority * 10 + auth->cra_priority;
335 inst->alg.cra_blocksize = enc->cra_blocksize; 424 inst->alg.cra_blocksize = enc->cra_blocksize;
336 inst->alg.cra_alignmask = max(auth->cra_alignmask, enc->cra_alignmask); 425 inst->alg.cra_alignmask = auth->cra_alignmask | enc->cra_alignmask;
337 inst->alg.cra_type = &crypto_aead_type; 426 inst->alg.cra_type = &crypto_aead_type;
338 427
339 inst->alg.cra_aead.ivsize = enc->cra_blkcipher.ivsize; 428 inst->alg.cra_aead.ivsize = enc->cra_ablkcipher.ivsize;
340 inst->alg.cra_aead.authsize = authsize; 429 inst->alg.cra_aead.maxauthsize = auth->cra_type == &crypto_hash_type ?
430 auth->cra_hash.digestsize :
431 auth->cra_digest.dia_digestsize;
341 432
342 inst->alg.cra_ctxsize = sizeof(struct crypto_authenc_ctx); 433 inst->alg.cra_ctxsize = sizeof(struct crypto_authenc_ctx);
343 434
@@ -347,18 +438,19 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
347 inst->alg.cra_aead.setkey = crypto_authenc_setkey; 438 inst->alg.cra_aead.setkey = crypto_authenc_setkey;
348 inst->alg.cra_aead.encrypt = crypto_authenc_encrypt; 439 inst->alg.cra_aead.encrypt = crypto_authenc_encrypt;
349 inst->alg.cra_aead.decrypt = crypto_authenc_decrypt; 440 inst->alg.cra_aead.decrypt = crypto_authenc_decrypt;
441 inst->alg.cra_aead.givencrypt = crypto_authenc_givencrypt;
350 442
351out: 443out:
352 crypto_mod_put(enc);
353out_put_auth:
354 crypto_mod_put(auth); 444 crypto_mod_put(auth);
355 return inst; 445 return inst;
356 446
447err_drop_enc:
448 crypto_drop_skcipher(&ctx->enc);
357err_drop_auth: 449err_drop_auth:
358 crypto_drop_spawn(&ctx->auth); 450 crypto_drop_spawn(&ctx->auth);
359err_free_inst: 451err_free_inst:
360 kfree(inst); 452 kfree(inst);
361out_put_enc: 453out_put_auth:
362 inst = ERR_PTR(err); 454 inst = ERR_PTR(err);
363 goto out; 455 goto out;
364} 456}
@@ -367,7 +459,7 @@ static void crypto_authenc_free(struct crypto_instance *inst)
367{ 459{
368 struct authenc_instance_ctx *ctx = crypto_instance_ctx(inst); 460 struct authenc_instance_ctx *ctx = crypto_instance_ctx(inst);
369 461
370 crypto_drop_spawn(&ctx->enc); 462 crypto_drop_skcipher(&ctx->enc);
371 crypto_drop_spawn(&ctx->auth); 463 crypto_drop_spawn(&ctx->auth);
372 kfree(inst); 464 kfree(inst);
373} 465}
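
With the authsize and enckeylen template parameters gone, the combined key handed to crypto_aead_setkey() now carries the split itself: an rtattr header of type CRYPTO_AUTHENC_KEYA_PARAM holding a big-endian enckeylen, followed by the raw keys. The sketch below shows how a user of the template might pack such a blob. It is not part of the patch; the helper name and the auth-key-before-enc-key ordering follow the in-tree callers (e.g. IPsec) rather than anything visible in this hunk, and struct crypto_authenc_key_param is assumed to live in the new <crypto/authenc.h> added by this series.

#include <crypto/authenc.h>
#include <linux/errno.h>
#include <linux/rtnetlink.h>
#include <linux/string.h>
#include <linux/types.h>
#include <asm/byteorder.h>

static int pack_authenc_key(u8 *buf, unsigned int buflen,
			    const u8 *authkey, unsigned int authkeylen,
			    const u8 *enckey, unsigned int enckeylen)
{
	struct rtattr *rta = (struct rtattr *)buf;
	struct crypto_authenc_key_param *param;

	if (buflen < RTA_SPACE(sizeof(*param)) + authkeylen + enckeylen)
		return -EINVAL;

	/* Header consumed by RTA_OK()/RTA_DATA() in the setkey above. */
	rta->rta_type = CRYPTO_AUTHENC_KEYA_PARAM;
	rta->rta_len = RTA_LENGTH(sizeof(*param));
	param = RTA_DATA(rta);
	param->enckeylen = cpu_to_be32(enckeylen);

	/* setkey skips RTA_ALIGN(rta->rta_len) == RTA_SPACE(sizeof(*param)). */
	buf += RTA_SPACE(sizeof(*param));
	memcpy(buf, authkey, authkeylen);
	memcpy(buf + authkeylen, enckey, enckeylen);

	/* The whole buffer is then passed to crypto_aead_setkey() in one go. */
	return 0;
}
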
diff --git a/crypto/blkcipher.c b/crypto/blkcipher.c
index f6c67f9d4e5c..4a7e65c4df4d 100644
--- a/crypto/blkcipher.c
+++ b/crypto/blkcipher.c
@@ -14,7 +14,8 @@
14 * 14 *
15 */ 15 */
16 16
17#include <linux/crypto.h> 17#include <crypto/internal/skcipher.h>
18#include <crypto/scatterwalk.h>
18#include <linux/errno.h> 19#include <linux/errno.h>
19#include <linux/hardirq.h> 20#include <linux/hardirq.h>
20#include <linux/kernel.h> 21#include <linux/kernel.h>
@@ -25,7 +26,6 @@
25#include <linux/string.h> 26#include <linux/string.h>
26 27
27#include "internal.h" 28#include "internal.h"
28#include "scatterwalk.h"
29 29
30enum { 30enum {
31 BLKCIPHER_WALK_PHYS = 1 << 0, 31 BLKCIPHER_WALK_PHYS = 1 << 0,
@@ -433,9 +433,8 @@ static unsigned int crypto_blkcipher_ctxsize(struct crypto_alg *alg, u32 type,
433 struct blkcipher_alg *cipher = &alg->cra_blkcipher; 433 struct blkcipher_alg *cipher = &alg->cra_blkcipher;
434 unsigned int len = alg->cra_ctxsize; 434 unsigned int len = alg->cra_ctxsize;
435 435
436 type ^= CRYPTO_ALG_ASYNC; 436 if ((mask & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_MASK &&
437 mask &= CRYPTO_ALG_ASYNC; 437 cipher->ivsize) {
438 if ((type & mask) && cipher->ivsize) {
439 len = ALIGN(len, (unsigned long)alg->cra_alignmask + 1); 438 len = ALIGN(len, (unsigned long)alg->cra_alignmask + 1);
440 len += cipher->ivsize; 439 len += cipher->ivsize;
441 } 440 }
@@ -451,6 +450,11 @@ static int crypto_init_blkcipher_ops_async(struct crypto_tfm *tfm)
451 crt->setkey = async_setkey; 450 crt->setkey = async_setkey;
452 crt->encrypt = async_encrypt; 451 crt->encrypt = async_encrypt;
453 crt->decrypt = async_decrypt; 452 crt->decrypt = async_decrypt;
453 if (!alg->ivsize) {
454 crt->givencrypt = skcipher_null_givencrypt;
455 crt->givdecrypt = skcipher_null_givdecrypt;
456 }
457 crt->base = __crypto_ablkcipher_cast(tfm);
454 crt->ivsize = alg->ivsize; 458 crt->ivsize = alg->ivsize;
455 459
456 return 0; 460 return 0;
@@ -482,9 +486,7 @@ static int crypto_init_blkcipher_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
482 if (alg->ivsize > PAGE_SIZE / 8) 486 if (alg->ivsize > PAGE_SIZE / 8)
483 return -EINVAL; 487 return -EINVAL;
484 488
485 type ^= CRYPTO_ALG_ASYNC; 489 if ((mask & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_MASK)
486 mask &= CRYPTO_ALG_ASYNC;
487 if (type & mask)
488 return crypto_init_blkcipher_ops_sync(tfm); 490 return crypto_init_blkcipher_ops_sync(tfm);
489 else 491 else
490 return crypto_init_blkcipher_ops_async(tfm); 492 return crypto_init_blkcipher_ops_async(tfm);
@@ -499,6 +501,8 @@ static void crypto_blkcipher_show(struct seq_file *m, struct crypto_alg *alg)
499 seq_printf(m, "min keysize : %u\n", alg->cra_blkcipher.min_keysize); 501 seq_printf(m, "min keysize : %u\n", alg->cra_blkcipher.min_keysize);
500 seq_printf(m, "max keysize : %u\n", alg->cra_blkcipher.max_keysize); 502 seq_printf(m, "max keysize : %u\n", alg->cra_blkcipher.max_keysize);
501 seq_printf(m, "ivsize : %u\n", alg->cra_blkcipher.ivsize); 503 seq_printf(m, "ivsize : %u\n", alg->cra_blkcipher.ivsize);
504 seq_printf(m, "geniv : %s\n", alg->cra_blkcipher.geniv ?:
505 "<default>");
502} 506}
503 507
504const struct crypto_type crypto_blkcipher_type = { 508const struct crypto_type crypto_blkcipher_type = {
@@ -510,5 +514,187 @@ const struct crypto_type crypto_blkcipher_type = {
510}; 514};
511EXPORT_SYMBOL_GPL(crypto_blkcipher_type); 515EXPORT_SYMBOL_GPL(crypto_blkcipher_type);
512 516
517static int crypto_grab_nivcipher(struct crypto_skcipher_spawn *spawn,
518 const char *name, u32 type, u32 mask)
519{
520 struct crypto_alg *alg;
521 int err;
522
523 type = crypto_skcipher_type(type);
524 mask = crypto_skcipher_mask(mask) | CRYPTO_ALG_GENIV;
525
526 alg = crypto_alg_mod_lookup(name, type, mask);
527 if (IS_ERR(alg))
528 return PTR_ERR(alg);
529
530 err = crypto_init_spawn(&spawn->base, alg, spawn->base.inst, mask);
531 crypto_mod_put(alg);
532 return err;
533}
534
535struct crypto_instance *skcipher_geniv_alloc(struct crypto_template *tmpl,
536 struct rtattr **tb, u32 type,
537 u32 mask)
538{
539 struct {
540 int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
541 unsigned int keylen);
542 int (*encrypt)(struct ablkcipher_request *req);
543 int (*decrypt)(struct ablkcipher_request *req);
544
545 unsigned int min_keysize;
546 unsigned int max_keysize;
547 unsigned int ivsize;
548
549 const char *geniv;
550 } balg;
551 const char *name;
552 struct crypto_skcipher_spawn *spawn;
553 struct crypto_attr_type *algt;
554 struct crypto_instance *inst;
555 struct crypto_alg *alg;
556 int err;
557
558 algt = crypto_get_attr_type(tb);
559 err = PTR_ERR(algt);
560 if (IS_ERR(algt))
561 return ERR_PTR(err);
562
563 if ((algt->type ^ (CRYPTO_ALG_TYPE_GIVCIPHER | CRYPTO_ALG_GENIV)) &
564 algt->mask)
565 return ERR_PTR(-EINVAL);
566
567 name = crypto_attr_alg_name(tb[1]);
568 err = PTR_ERR(name);
569 if (IS_ERR(name))
570 return ERR_PTR(err);
571
572 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
573 if (!inst)
574 return ERR_PTR(-ENOMEM);
575
576 spawn = crypto_instance_ctx(inst);
577
578 /* Ignore async algorithms if necessary. */
579 mask |= crypto_requires_sync(algt->type, algt->mask);
580
581 crypto_set_skcipher_spawn(spawn, inst);
582 err = crypto_grab_nivcipher(spawn, name, type, mask);
583 if (err)
584 goto err_free_inst;
585
586 alg = crypto_skcipher_spawn_alg(spawn);
587
588 if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
589 CRYPTO_ALG_TYPE_BLKCIPHER) {
590 balg.ivsize = alg->cra_blkcipher.ivsize;
591 balg.min_keysize = alg->cra_blkcipher.min_keysize;
592 balg.max_keysize = alg->cra_blkcipher.max_keysize;
593
594 balg.setkey = async_setkey;
595 balg.encrypt = async_encrypt;
596 balg.decrypt = async_decrypt;
597
598 balg.geniv = alg->cra_blkcipher.geniv;
599 } else {
600 balg.ivsize = alg->cra_ablkcipher.ivsize;
601 balg.min_keysize = alg->cra_ablkcipher.min_keysize;
602 balg.max_keysize = alg->cra_ablkcipher.max_keysize;
603
604 balg.setkey = alg->cra_ablkcipher.setkey;
605 balg.encrypt = alg->cra_ablkcipher.encrypt;
606 balg.decrypt = alg->cra_ablkcipher.decrypt;
607
608 balg.geniv = alg->cra_ablkcipher.geniv;
609 }
610
611 err = -EINVAL;
612 if (!balg.ivsize)
613 goto err_drop_alg;
614
615 /*
616 * This is only true if we're constructing an algorithm with its
617 * default IV generator. For the default generator we elide the
618 * template name and double-check the IV generator.
619 */
620 if (algt->mask & CRYPTO_ALG_GENIV) {
621 if (!balg.geniv)
622 balg.geniv = crypto_default_geniv(alg);
623 err = -EAGAIN;
624 if (strcmp(tmpl->name, balg.geniv))
625 goto err_drop_alg;
626
627 memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
628 memcpy(inst->alg.cra_driver_name, alg->cra_driver_name,
629 CRYPTO_MAX_ALG_NAME);
630 } else {
631 err = -ENAMETOOLONG;
632 if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
633 "%s(%s)", tmpl->name, alg->cra_name) >=
634 CRYPTO_MAX_ALG_NAME)
635 goto err_drop_alg;
636 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
637 "%s(%s)", tmpl->name, alg->cra_driver_name) >=
638 CRYPTO_MAX_ALG_NAME)
639 goto err_drop_alg;
640 }
641
642 inst->alg.cra_flags = CRYPTO_ALG_TYPE_GIVCIPHER | CRYPTO_ALG_GENIV;
643 inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
644 inst->alg.cra_priority = alg->cra_priority;
645 inst->alg.cra_blocksize = alg->cra_blocksize;
646 inst->alg.cra_alignmask = alg->cra_alignmask;
647 inst->alg.cra_type = &crypto_givcipher_type;
648
649 inst->alg.cra_ablkcipher.ivsize = balg.ivsize;
650 inst->alg.cra_ablkcipher.min_keysize = balg.min_keysize;
651 inst->alg.cra_ablkcipher.max_keysize = balg.max_keysize;
652 inst->alg.cra_ablkcipher.geniv = balg.geniv;
653
654 inst->alg.cra_ablkcipher.setkey = balg.setkey;
655 inst->alg.cra_ablkcipher.encrypt = balg.encrypt;
656 inst->alg.cra_ablkcipher.decrypt = balg.decrypt;
657
658out:
659 return inst;
660
661err_drop_alg:
662 crypto_drop_skcipher(spawn);
663err_free_inst:
664 kfree(inst);
665 inst = ERR_PTR(err);
666 goto out;
667}
668EXPORT_SYMBOL_GPL(skcipher_geniv_alloc);
669
670void skcipher_geniv_free(struct crypto_instance *inst)
671{
672 crypto_drop_skcipher(crypto_instance_ctx(inst));
673 kfree(inst);
674}
675EXPORT_SYMBOL_GPL(skcipher_geniv_free);
676
677int skcipher_geniv_init(struct crypto_tfm *tfm)
678{
679 struct crypto_instance *inst = (void *)tfm->__crt_alg;
680 struct crypto_ablkcipher *cipher;
681
682 cipher = crypto_spawn_skcipher(crypto_instance_ctx(inst));
683 if (IS_ERR(cipher))
684 return PTR_ERR(cipher);
685
686 tfm->crt_ablkcipher.base = cipher;
687 tfm->crt_ablkcipher.reqsize += crypto_ablkcipher_reqsize(cipher);
688
689 return 0;
690}
691EXPORT_SYMBOL_GPL(skcipher_geniv_init);
692
693void skcipher_geniv_exit(struct crypto_tfm *tfm)
694{
695 crypto_free_ablkcipher(tfm->crt_ablkcipher.base);
696}
697EXPORT_SYMBOL_GPL(skcipher_geniv_exit);
698
513MODULE_LICENSE("GPL"); 699MODULE_LICENSE("GPL");
514MODULE_DESCRIPTION("Generic block chaining cipher type"); 700MODULE_DESCRIPTION("Generic block chaining cipher type");
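
skcipher_geniv_alloc/free/init/exit above are the scaffolding that the IV-generator templates added elsewhere in this series (chainiv, eseqiv, seqiv) sit on. The sketch below is a hypothetical minimal template, not one of those generators: the "myiv" name and its copy-the-sequence-number IV policy are purely illustrative, and the skcipher_givcrypt_*() and skcipher_geniv_cipher() accessors are assumed to come from the new <crypto/internal/skcipher.h>. It only shows how the exported pieces plug together.

#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/string.h>

static struct crypto_template myiv_tmpl;

static int myiv_givencrypt(struct skcipher_givcrypt_request *req)
{
	struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
	struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
	unsigned int ivsize = crypto_ablkcipher_ivsize(geniv);

	/* Illustrative IV only: zero pad, then the 64-bit sequence number
	 * in host order.  Assumes ivsize >= sizeof(u64). */
	memset(req->giv, 0, ivsize);
	memcpy(req->giv + ivsize - sizeof(u64), &req->seq, sizeof(u64));

	/* Hand the actual encryption to the wrapped cipher. */
	ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv));
	ablkcipher_request_set_callback(subreq, req->creq.base.flags,
					req->creq.base.complete,
					req->creq.base.data);
	ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst,
				     req->creq.nbytes, req->giv);
	return crypto_ablkcipher_encrypt(subreq);
}

static int myiv_init(struct crypto_tfm *tfm)
{
	/* Reserve room for the sub-request; skcipher_geniv_init() adds the
	 * wrapped cipher's own request size on top. */
	tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request);
	return skcipher_geniv_init(tfm);
}

static struct crypto_instance *myiv_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;

	inst = skcipher_geniv_alloc(&myiv_tmpl, tb, 0, 0);
	if (IS_ERR(inst))
		return inst;

	inst->alg.cra_ablkcipher.givencrypt = myiv_givencrypt;
	inst->alg.cra_init = myiv_init;
	inst->alg.cra_exit = skcipher_geniv_exit;
	return inst;
}

static struct crypto_template myiv_tmpl = {
	.name = "myiv",
	.alloc = myiv_alloc,
	.free = skcipher_geniv_free,
	.module = THIS_MODULE,
};

static int __init myiv_module_init(void)
{
	return crypto_register_template(&myiv_tmpl);
}

static void __exit myiv_module_exit(void)
{
	crypto_unregister_template(&myiv_tmpl);
}

module_init(myiv_module_init);
module_exit(myiv_module_exit);
MODULE_LICENSE("GPL");
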
diff --git a/crypto/camellia.c b/crypto/camellia.c
index 6877ecfd90bb..493fee7e0a8b 100644
--- a/crypto/camellia.c
+++ b/crypto/camellia.c
@@ -36,176 +36,6 @@
36#include <linux/kernel.h> 36#include <linux/kernel.h>
37#include <linux/module.h> 37#include <linux/module.h>
38 38
39
40#define CAMELLIA_MIN_KEY_SIZE 16
41#define CAMELLIA_MAX_KEY_SIZE 32
42#define CAMELLIA_BLOCK_SIZE 16
43#define CAMELLIA_TABLE_BYTE_LEN 272
44#define CAMELLIA_TABLE_WORD_LEN (CAMELLIA_TABLE_BYTE_LEN / 4)
45
46typedef u32 KEY_TABLE_TYPE[CAMELLIA_TABLE_WORD_LEN];
47
48
49/* key constants */
50
51#define CAMELLIA_SIGMA1L (0xA09E667FL)
52#define CAMELLIA_SIGMA1R (0x3BCC908BL)
53#define CAMELLIA_SIGMA2L (0xB67AE858L)
54#define CAMELLIA_SIGMA2R (0x4CAA73B2L)
55#define CAMELLIA_SIGMA3L (0xC6EF372FL)
56#define CAMELLIA_SIGMA3R (0xE94F82BEL)
57#define CAMELLIA_SIGMA4L (0x54FF53A5L)
58#define CAMELLIA_SIGMA4R (0xF1D36F1CL)
59#define CAMELLIA_SIGMA5L (0x10E527FAL)
60#define CAMELLIA_SIGMA5R (0xDE682D1DL)
61#define CAMELLIA_SIGMA6L (0xB05688C2L)
62#define CAMELLIA_SIGMA6R (0xB3E6C1FDL)
63
64struct camellia_ctx {
65 int key_length;
66 KEY_TABLE_TYPE key_table;
67};
68
69
70/*
71 * macros
72 */
73
74
75# define GETU32(pt) (((u32)(pt)[0] << 24) \
76 ^ ((u32)(pt)[1] << 16) \
77 ^ ((u32)(pt)[2] << 8) \
78 ^ ((u32)(pt)[3]))
79
80#define COPY4WORD(dst, src) \
81 do { \
82 (dst)[0]=(src)[0]; \
83 (dst)[1]=(src)[1]; \
84 (dst)[2]=(src)[2]; \
85 (dst)[3]=(src)[3]; \
86 }while(0)
87
88#define SWAP4WORD(word) \
89 do { \
90 CAMELLIA_SWAP4((word)[0]); \
91 CAMELLIA_SWAP4((word)[1]); \
92 CAMELLIA_SWAP4((word)[2]); \
93 CAMELLIA_SWAP4((word)[3]); \
94 }while(0)
95
96#define XOR4WORD(a, b)/* a = a ^ b */ \
97 do { \
98 (a)[0]^=(b)[0]; \
99 (a)[1]^=(b)[1]; \
100 (a)[2]^=(b)[2]; \
101 (a)[3]^=(b)[3]; \
102 }while(0)
103
104#define XOR4WORD2(a, b, c)/* a = b ^ c */ \
105 do { \
106 (a)[0]=(b)[0]^(c)[0]; \
107 (a)[1]=(b)[1]^(c)[1]; \
108 (a)[2]=(b)[2]^(c)[2]; \
109 (a)[3]=(b)[3]^(c)[3]; \
110 }while(0)
111
112#define CAMELLIA_SUBKEY_L(INDEX) (subkey[(INDEX)*2])
113#define CAMELLIA_SUBKEY_R(INDEX) (subkey[(INDEX)*2 + 1])
114
115/* rotation right shift 1byte */
116#define CAMELLIA_RR8(x) (((x) >> 8) + ((x) << 24))
117/* rotation left shift 1bit */
118#define CAMELLIA_RL1(x) (((x) << 1) + ((x) >> 31))
119/* rotation left shift 1byte */
120#define CAMELLIA_RL8(x) (((x) << 8) + ((x) >> 24))
121
122#define CAMELLIA_ROLDQ(ll, lr, rl, rr, w0, w1, bits) \
123 do { \
124 w0 = ll; \
125 ll = (ll << bits) + (lr >> (32 - bits)); \
126 lr = (lr << bits) + (rl >> (32 - bits)); \
127 rl = (rl << bits) + (rr >> (32 - bits)); \
128 rr = (rr << bits) + (w0 >> (32 - bits)); \
129 } while(0)
130
131#define CAMELLIA_ROLDQo32(ll, lr, rl, rr, w0, w1, bits) \
132 do { \
133 w0 = ll; \
134 w1 = lr; \
135 ll = (lr << (bits - 32)) + (rl >> (64 - bits)); \
136 lr = (rl << (bits - 32)) + (rr >> (64 - bits)); \
137 rl = (rr << (bits - 32)) + (w0 >> (64 - bits)); \
138 rr = (w0 << (bits - 32)) + (w1 >> (64 - bits)); \
139 } while(0)
140
141#define CAMELLIA_SP1110(INDEX) (camellia_sp1110[(INDEX)])
142#define CAMELLIA_SP0222(INDEX) (camellia_sp0222[(INDEX)])
143#define CAMELLIA_SP3033(INDEX) (camellia_sp3033[(INDEX)])
144#define CAMELLIA_SP4404(INDEX) (camellia_sp4404[(INDEX)])
145
146#define CAMELLIA_F(xl, xr, kl, kr, yl, yr, il, ir, t0, t1) \
147 do { \
148 il = xl ^ kl; \
149 ir = xr ^ kr; \
150 t0 = il >> 16; \
151 t1 = ir >> 16; \
152 yl = CAMELLIA_SP1110(ir & 0xff) \
153 ^ CAMELLIA_SP0222((t1 >> 8) & 0xff) \
154 ^ CAMELLIA_SP3033(t1 & 0xff) \
155 ^ CAMELLIA_SP4404((ir >> 8) & 0xff); \
156 yr = CAMELLIA_SP1110((t0 >> 8) & 0xff) \
157 ^ CAMELLIA_SP0222(t0 & 0xff) \
158 ^ CAMELLIA_SP3033((il >> 8) & 0xff) \
159 ^ CAMELLIA_SP4404(il & 0xff); \
160 yl ^= yr; \
161 yr = CAMELLIA_RR8(yr); \
162 yr ^= yl; \
163 } while(0)
164
165
166/*
167 * for speed up
168 *
169 */
170#define CAMELLIA_FLS(ll, lr, rl, rr, kll, klr, krl, krr, t0, t1, t2, t3) \
171 do { \
172 t0 = kll; \
173 t2 = krr; \
174 t0 &= ll; \
175 t2 |= rr; \
176 rl ^= t2; \
177 lr ^= CAMELLIA_RL1(t0); \
178 t3 = krl; \
179 t1 = klr; \
180 t3 &= rl; \
181 t1 |= lr; \
182 ll ^= t1; \
183 rr ^= CAMELLIA_RL1(t3); \
184 } while(0)
185
186#define CAMELLIA_ROUNDSM(xl, xr, kl, kr, yl, yr, il, ir, t0, t1) \
187 do { \
188 ir = CAMELLIA_SP1110(xr & 0xff); \
189 il = CAMELLIA_SP1110((xl>>24) & 0xff); \
190 ir ^= CAMELLIA_SP0222((xr>>24) & 0xff); \
191 il ^= CAMELLIA_SP0222((xl>>16) & 0xff); \
192 ir ^= CAMELLIA_SP3033((xr>>16) & 0xff); \
193 il ^= CAMELLIA_SP3033((xl>>8) & 0xff); \
194 ir ^= CAMELLIA_SP4404((xr>>8) & 0xff); \
195 il ^= CAMELLIA_SP4404(xl & 0xff); \
196 il ^= kl; \
197 ir ^= il ^ kr; \
198 yl ^= ir; \
199 yr ^= CAMELLIA_RR8(il) ^ ir; \
200 } while(0)
201
202/**
203 * Stuff related to the Camellia key schedule
204 */
205#define SUBL(x) subL[(x)]
206#define SUBR(x) subR[(x)]
207
208
209static const u32 camellia_sp1110[256] = { 39static const u32 camellia_sp1110[256] = {
210 0x70707000,0x82828200,0x2c2c2c00,0xececec00, 40 0x70707000,0x82828200,0x2c2c2c00,0xececec00,
211 0xb3b3b300,0x27272700,0xc0c0c000,0xe5e5e500, 41 0xb3b3b300,0x27272700,0xc0c0c000,0xe5e5e500,
@@ -475,67 +305,348 @@ static const u32 camellia_sp4404[256] = {
475}; 305};
476 306
477 307
308#define CAMELLIA_MIN_KEY_SIZE 16
309#define CAMELLIA_MAX_KEY_SIZE 32
310#define CAMELLIA_BLOCK_SIZE 16
311#define CAMELLIA_TABLE_BYTE_LEN 272
312
313/*
314 * NB: L and R below stand for 'left' and 'right' as in written numbers.
315 * That is, in (xxxL,xxxR) pair xxxL holds most significant digits,
316 * _not_ least significant ones!
317 */
318
319
320/* key constants */
321
322#define CAMELLIA_SIGMA1L (0xA09E667FL)
323#define CAMELLIA_SIGMA1R (0x3BCC908BL)
324#define CAMELLIA_SIGMA2L (0xB67AE858L)
325#define CAMELLIA_SIGMA2R (0x4CAA73B2L)
326#define CAMELLIA_SIGMA3L (0xC6EF372FL)
327#define CAMELLIA_SIGMA3R (0xE94F82BEL)
328#define CAMELLIA_SIGMA4L (0x54FF53A5L)
329#define CAMELLIA_SIGMA4R (0xF1D36F1CL)
330#define CAMELLIA_SIGMA5L (0x10E527FAL)
331#define CAMELLIA_SIGMA5R (0xDE682D1DL)
332#define CAMELLIA_SIGMA6L (0xB05688C2L)
333#define CAMELLIA_SIGMA6R (0xB3E6C1FDL)
334
335/*
336 * macros
337 */
338#define GETU32(v, pt) \
339 do { \
340 /* latest breed of gcc is clever enough to use move */ \
341 memcpy(&(v), (pt), 4); \
342 (v) = be32_to_cpu(v); \
343 } while(0)
344
345/* rotation right shift 1byte */
346#define ROR8(x) (((x) >> 8) + ((x) << 24))
347/* rotation left shift 1bit */
348#define ROL1(x) (((x) << 1) + ((x) >> 31))
349/* rotation left shift 1byte */
350#define ROL8(x) (((x) << 8) + ((x) >> 24))
351
352#define ROLDQ(ll, lr, rl, rr, w0, w1, bits) \
353 do { \
354 w0 = ll; \
355 ll = (ll << bits) + (lr >> (32 - bits)); \
356 lr = (lr << bits) + (rl >> (32 - bits)); \
357 rl = (rl << bits) + (rr >> (32 - bits)); \
358 rr = (rr << bits) + (w0 >> (32 - bits)); \
359 } while(0)
360
361#define ROLDQo32(ll, lr, rl, rr, w0, w1, bits) \
362 do { \
363 w0 = ll; \
364 w1 = lr; \
365 ll = (lr << (bits - 32)) + (rl >> (64 - bits)); \
366 lr = (rl << (bits - 32)) + (rr >> (64 - bits)); \
367 rl = (rr << (bits - 32)) + (w0 >> (64 - bits)); \
368 rr = (w0 << (bits - 32)) + (w1 >> (64 - bits)); \
369 } while(0)
370
371#define CAMELLIA_F(xl, xr, kl, kr, yl, yr, il, ir, t0, t1) \
372 do { \
373 il = xl ^ kl; \
374 ir = xr ^ kr; \
375 t0 = il >> 16; \
376 t1 = ir >> 16; \
377 yl = camellia_sp1110[(u8)(ir )] \
378 ^ camellia_sp0222[ (t1 >> 8)] \
379 ^ camellia_sp3033[(u8)(t1 )] \
380 ^ camellia_sp4404[(u8)(ir >> 8)]; \
381 yr = camellia_sp1110[ (t0 >> 8)] \
382 ^ camellia_sp0222[(u8)(t0 )] \
383 ^ camellia_sp3033[(u8)(il >> 8)] \
384 ^ camellia_sp4404[(u8)(il )]; \
385 yl ^= yr; \
386 yr = ROR8(yr); \
387 yr ^= yl; \
388 } while(0)
389
390#define SUBKEY_L(INDEX) (subkey[(INDEX)*2])
391#define SUBKEY_R(INDEX) (subkey[(INDEX)*2 + 1])
392
393static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
394{
395 u32 dw, tl, tr;
396 u32 kw4l, kw4r;
397 int i;
398
399 /* absorb kw2 to other subkeys */
400 /* round 2 */
401 subL[3] ^= subL[1]; subR[3] ^= subR[1];
402 /* round 4 */
403 subL[5] ^= subL[1]; subR[5] ^= subR[1];
404 /* round 6 */
405 subL[7] ^= subL[1]; subR[7] ^= subR[1];
406 subL[1] ^= subR[1] & ~subR[9];
407 dw = subL[1] & subL[9],
408 subR[1] ^= ROL1(dw); /* modified for FLinv(kl2) */
409 /* round 8 */
410 subL[11] ^= subL[1]; subR[11] ^= subR[1];
411 /* round 10 */
412 subL[13] ^= subL[1]; subR[13] ^= subR[1];
413 /* round 12 */
414 subL[15] ^= subL[1]; subR[15] ^= subR[1];
415 subL[1] ^= subR[1] & ~subR[17];
416 dw = subL[1] & subL[17],
417 subR[1] ^= ROL1(dw); /* modified for FLinv(kl4) */
418 /* round 14 */
419 subL[19] ^= subL[1]; subR[19] ^= subR[1];
420 /* round 16 */
421 subL[21] ^= subL[1]; subR[21] ^= subR[1];
422 /* round 18 */
423 subL[23] ^= subL[1]; subR[23] ^= subR[1];
424 if (max == 24) {
425 /* kw3 */
426 subL[24] ^= subL[1]; subR[24] ^= subR[1];
427
428 /* absorb kw4 to other subkeys */
429 kw4l = subL[25]; kw4r = subR[25];
430 } else {
431 subL[1] ^= subR[1] & ~subR[25];
432 dw = subL[1] & subL[25],
433 subR[1] ^= ROL1(dw); /* modified for FLinv(kl6) */
434 /* round 20 */
435 subL[27] ^= subL[1]; subR[27] ^= subR[1];
436 /* round 22 */
437 subL[29] ^= subL[1]; subR[29] ^= subR[1];
438 /* round 24 */
439 subL[31] ^= subL[1]; subR[31] ^= subR[1];
440 /* kw3 */
441 subL[32] ^= subL[1]; subR[32] ^= subR[1];
442
443 /* absorb kw4 to other subkeys */
444 kw4l = subL[33]; kw4r = subR[33];
445 /* round 23 */
446 subL[30] ^= kw4l; subR[30] ^= kw4r;
447 /* round 21 */
448 subL[28] ^= kw4l; subR[28] ^= kw4r;
449 /* round 19 */
450 subL[26] ^= kw4l; subR[26] ^= kw4r;
451 kw4l ^= kw4r & ~subR[24];
452 dw = kw4l & subL[24],
453 kw4r ^= ROL1(dw); /* modified for FL(kl5) */
454 }
455 /* round 17 */
456 subL[22] ^= kw4l; subR[22] ^= kw4r;
457 /* round 15 */
458 subL[20] ^= kw4l; subR[20] ^= kw4r;
459 /* round 13 */
460 subL[18] ^= kw4l; subR[18] ^= kw4r;
461 kw4l ^= kw4r & ~subR[16];
462 dw = kw4l & subL[16],
463 kw4r ^= ROL1(dw); /* modified for FL(kl3) */
464 /* round 11 */
465 subL[14] ^= kw4l; subR[14] ^= kw4r;
466 /* round 9 */
467 subL[12] ^= kw4l; subR[12] ^= kw4r;
468 /* round 7 */
469 subL[10] ^= kw4l; subR[10] ^= kw4r;
470 kw4l ^= kw4r & ~subR[8];
471 dw = kw4l & subL[8],
472 kw4r ^= ROL1(dw); /* modified for FL(kl1) */
473 /* round 5 */
474 subL[6] ^= kw4l; subR[6] ^= kw4r;
475 /* round 3 */
476 subL[4] ^= kw4l; subR[4] ^= kw4r;
477 /* round 1 */
478 subL[2] ^= kw4l; subR[2] ^= kw4r;
479 /* kw1 */
480 subL[0] ^= kw4l; subR[0] ^= kw4r;
481
482 /* key XOR is end of F-function */
483 SUBKEY_L(0) = subL[0] ^ subL[2];/* kw1 */
484 SUBKEY_R(0) = subR[0] ^ subR[2];
485 SUBKEY_L(2) = subL[3]; /* round 1 */
486 SUBKEY_R(2) = subR[3];
487 SUBKEY_L(3) = subL[2] ^ subL[4]; /* round 2 */
488 SUBKEY_R(3) = subR[2] ^ subR[4];
489 SUBKEY_L(4) = subL[3] ^ subL[5]; /* round 3 */
490 SUBKEY_R(4) = subR[3] ^ subR[5];
491 SUBKEY_L(5) = subL[4] ^ subL[6]; /* round 4 */
492 SUBKEY_R(5) = subR[4] ^ subR[6];
493 SUBKEY_L(6) = subL[5] ^ subL[7]; /* round 5 */
494 SUBKEY_R(6) = subR[5] ^ subR[7];
495 tl = subL[10] ^ (subR[10] & ~subR[8]);
496 dw = tl & subL[8], /* FL(kl1) */
497 tr = subR[10] ^ ROL1(dw);
498 SUBKEY_L(7) = subL[6] ^ tl; /* round 6 */
499 SUBKEY_R(7) = subR[6] ^ tr;
500 SUBKEY_L(8) = subL[8]; /* FL(kl1) */
501 SUBKEY_R(8) = subR[8];
502 SUBKEY_L(9) = subL[9]; /* FLinv(kl2) */
503 SUBKEY_R(9) = subR[9];
504 tl = subL[7] ^ (subR[7] & ~subR[9]);
505 dw = tl & subL[9], /* FLinv(kl2) */
506 tr = subR[7] ^ ROL1(dw);
507 SUBKEY_L(10) = tl ^ subL[11]; /* round 7 */
508 SUBKEY_R(10) = tr ^ subR[11];
509 SUBKEY_L(11) = subL[10] ^ subL[12]; /* round 8 */
510 SUBKEY_R(11) = subR[10] ^ subR[12];
511 SUBKEY_L(12) = subL[11] ^ subL[13]; /* round 9 */
512 SUBKEY_R(12) = subR[11] ^ subR[13];
513 SUBKEY_L(13) = subL[12] ^ subL[14]; /* round 10 */
514 SUBKEY_R(13) = subR[12] ^ subR[14];
515 SUBKEY_L(14) = subL[13] ^ subL[15]; /* round 11 */
516 SUBKEY_R(14) = subR[13] ^ subR[15];
517 tl = subL[18] ^ (subR[18] & ~subR[16]);
518 dw = tl & subL[16], /* FL(kl3) */
519 tr = subR[18] ^ ROL1(dw);
520 SUBKEY_L(15) = subL[14] ^ tl; /* round 12 */
521 SUBKEY_R(15) = subR[14] ^ tr;
522 SUBKEY_L(16) = subL[16]; /* FL(kl3) */
523 SUBKEY_R(16) = subR[16];
524 SUBKEY_L(17) = subL[17]; /* FLinv(kl4) */
525 SUBKEY_R(17) = subR[17];
526 tl = subL[15] ^ (subR[15] & ~subR[17]);
527 dw = tl & subL[17], /* FLinv(kl4) */
528 tr = subR[15] ^ ROL1(dw);
529 SUBKEY_L(18) = tl ^ subL[19]; /* round 13 */
530 SUBKEY_R(18) = tr ^ subR[19];
531 SUBKEY_L(19) = subL[18] ^ subL[20]; /* round 14 */
532 SUBKEY_R(19) = subR[18] ^ subR[20];
533 SUBKEY_L(20) = subL[19] ^ subL[21]; /* round 15 */
534 SUBKEY_R(20) = subR[19] ^ subR[21];
535 SUBKEY_L(21) = subL[20] ^ subL[22]; /* round 16 */
536 SUBKEY_R(21) = subR[20] ^ subR[22];
537 SUBKEY_L(22) = subL[21] ^ subL[23]; /* round 17 */
538 SUBKEY_R(22) = subR[21] ^ subR[23];
539 if (max == 24) {
540 SUBKEY_L(23) = subL[22]; /* round 18 */
541 SUBKEY_R(23) = subR[22];
542 SUBKEY_L(24) = subL[24] ^ subL[23]; /* kw3 */
543 SUBKEY_R(24) = subR[24] ^ subR[23];
544 } else {
545 tl = subL[26] ^ (subR[26] & ~subR[24]);
546 dw = tl & subL[24], /* FL(kl5) */
547 tr = subR[26] ^ ROL1(dw);
548 SUBKEY_L(23) = subL[22] ^ tl; /* round 18 */
549 SUBKEY_R(23) = subR[22] ^ tr;
550 SUBKEY_L(24) = subL[24]; /* FL(kl5) */
551 SUBKEY_R(24) = subR[24];
552 SUBKEY_L(25) = subL[25]; /* FLinv(kl6) */
553 SUBKEY_R(25) = subR[25];
554 tl = subL[23] ^ (subR[23] & ~subR[25]);
555 dw = tl & subL[25], /* FLinv(kl6) */
556 tr = subR[23] ^ ROL1(dw);
557 SUBKEY_L(26) = tl ^ subL[27]; /* round 19 */
558 SUBKEY_R(26) = tr ^ subR[27];
559 SUBKEY_L(27) = subL[26] ^ subL[28]; /* round 20 */
560 SUBKEY_R(27) = subR[26] ^ subR[28];
561 SUBKEY_L(28) = subL[27] ^ subL[29]; /* round 21 */
562 SUBKEY_R(28) = subR[27] ^ subR[29];
563 SUBKEY_L(29) = subL[28] ^ subL[30]; /* round 22 */
564 SUBKEY_R(29) = subR[28] ^ subR[30];
565 SUBKEY_L(30) = subL[29] ^ subL[31]; /* round 23 */
566 SUBKEY_R(30) = subR[29] ^ subR[31];
567 SUBKEY_L(31) = subL[30]; /* round 24 */
568 SUBKEY_R(31) = subR[30];
569 SUBKEY_L(32) = subL[32] ^ subL[31]; /* kw3 */
570 SUBKEY_R(32) = subR[32] ^ subR[31];
571 }
572
573 /* apply the inverse of the last half of P-function */
574 i = 2;
575 do {
576 dw = SUBKEY_L(i + 0) ^ SUBKEY_R(i + 0); dw = ROL8(dw);/* round 1 */
577 SUBKEY_R(i + 0) = SUBKEY_L(i + 0) ^ dw; SUBKEY_L(i + 0) = dw;
578 dw = SUBKEY_L(i + 1) ^ SUBKEY_R(i + 1); dw = ROL8(dw);/* round 2 */
579 SUBKEY_R(i + 1) = SUBKEY_L(i + 1) ^ dw; SUBKEY_L(i + 1) = dw;
580 dw = SUBKEY_L(i + 2) ^ SUBKEY_R(i + 2); dw = ROL8(dw);/* round 3 */
581 SUBKEY_R(i + 2) = SUBKEY_L(i + 2) ^ dw; SUBKEY_L(i + 2) = dw;
582 dw = SUBKEY_L(i + 3) ^ SUBKEY_R(i + 3); dw = ROL8(dw);/* round 4 */
583 SUBKEY_R(i + 3) = SUBKEY_L(i + 3) ^ dw; SUBKEY_L(i + 3) = dw;
584 dw = SUBKEY_L(i + 4) ^ SUBKEY_R(i + 4); dw = ROL8(dw);/* round 5 */
585 SUBKEY_R(i + 4) = SUBKEY_L(i + 4) ^ dw; SUBKEY_L(i + 4) = dw;
586 dw = SUBKEY_L(i + 5) ^ SUBKEY_R(i + 5); dw = ROL8(dw);/* round 6 */
587 SUBKEY_R(i + 5) = SUBKEY_L(i + 5) ^ dw; SUBKEY_L(i + 5) = dw;
588 i += 8;
589 } while (i < max);
590}
478 591
479static void camellia_setup128(const unsigned char *key, u32 *subkey) 592static void camellia_setup128(const unsigned char *key, u32 *subkey)
480{ 593{
481 u32 kll, klr, krl, krr; 594 u32 kll, klr, krl, krr;
482 u32 il, ir, t0, t1, w0, w1; 595 u32 il, ir, t0, t1, w0, w1;
483 u32 kw4l, kw4r, dw, tl, tr;
484 u32 subL[26]; 596 u32 subL[26];
485 u32 subR[26]; 597 u32 subR[26];
486 598
487 /** 599 /**
488 * k == kll || klr || krl || krr (|| is concatination) 600 * k == kll || klr || krl || krr (|| is concatenation)
489 */
490 kll = GETU32(key );
491 klr = GETU32(key + 4);
492 krl = GETU32(key + 8);
493 krr = GETU32(key + 12);
494 /**
495 * generate KL dependent subkeys
496 */ 601 */
602 GETU32(kll, key );
603 GETU32(klr, key + 4);
604 GETU32(krl, key + 8);
605 GETU32(krr, key + 12);
606
607 /* generate KL dependent subkeys */
497 /* kw1 */ 608 /* kw1 */
498 SUBL(0) = kll; SUBR(0) = klr; 609 subL[0] = kll; subR[0] = klr;
499 /* kw2 */ 610 /* kw2 */
500 SUBL(1) = krl; SUBR(1) = krr; 611 subL[1] = krl; subR[1] = krr;
501 /* rotation left shift 15bit */ 612 /* rotation left shift 15bit */
502 CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15); 613 ROLDQ(kll, klr, krl, krr, w0, w1, 15);
503 /* k3 */ 614 /* k3 */
504 SUBL(4) = kll; SUBR(4) = klr; 615 subL[4] = kll; subR[4] = klr;
505 /* k4 */ 616 /* k4 */
506 SUBL(5) = krl; SUBR(5) = krr; 617 subL[5] = krl; subR[5] = krr;
507 /* rotation left shift 15+30bit */ 618 /* rotation left shift 15+30bit */
508 CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 30); 619 ROLDQ(kll, klr, krl, krr, w0, w1, 30);
509 /* k7 */ 620 /* k7 */
510 SUBL(10) = kll; SUBR(10) = klr; 621 subL[10] = kll; subR[10] = klr;
511 /* k8 */ 622 /* k8 */
512 SUBL(11) = krl; SUBR(11) = krr; 623 subL[11] = krl; subR[11] = krr;
513 /* rotation left shift 15+30+15bit */ 624 /* rotation left shift 15+30+15bit */
514 CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15); 625 ROLDQ(kll, klr, krl, krr, w0, w1, 15);
515 /* k10 */ 626 /* k10 */
516 SUBL(13) = krl; SUBR(13) = krr; 627 subL[13] = krl; subR[13] = krr;
517 /* rotation left shift 15+30+15+17 bit */ 628 /* rotation left shift 15+30+15+17 bit */
518 CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 17); 629 ROLDQ(kll, klr, krl, krr, w0, w1, 17);
519 /* kl3 */ 630 /* kl3 */
520 SUBL(16) = kll; SUBR(16) = klr; 631 subL[16] = kll; subR[16] = klr;
521 /* kl4 */ 632 /* kl4 */
522 SUBL(17) = krl; SUBR(17) = krr; 633 subL[17] = krl; subR[17] = krr;
523 /* rotation left shift 15+30+15+17+17 bit */ 634 /* rotation left shift 15+30+15+17+17 bit */
524 CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 17); 635 ROLDQ(kll, klr, krl, krr, w0, w1, 17);
525 /* k13 */ 636 /* k13 */
526 SUBL(18) = kll; SUBR(18) = klr; 637 subL[18] = kll; subR[18] = klr;
527 /* k14 */ 638 /* k14 */
528 SUBL(19) = krl; SUBR(19) = krr; 639 subL[19] = krl; subR[19] = krr;
529 /* rotation left shift 15+30+15+17+17+17 bit */ 640 /* rotation left shift 15+30+15+17+17+17 bit */
530 CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 17); 641 ROLDQ(kll, klr, krl, krr, w0, w1, 17);
531 /* k17 */ 642 /* k17 */
532 SUBL(22) = kll; SUBR(22) = klr; 643 subL[22] = kll; subR[22] = klr;
533 /* k18 */ 644 /* k18 */
534 SUBL(23) = krl; SUBR(23) = krr; 645 subL[23] = krl; subR[23] = krr;
535 646
536 /* generate KA */ 647 /* generate KA */
537 kll = SUBL(0); klr = SUBR(0); 648 kll = subL[0]; klr = subR[0];
538 krl = SUBL(1); krr = SUBR(1); 649 krl = subL[1]; krr = subR[1];
539 CAMELLIA_F(kll, klr, 650 CAMELLIA_F(kll, klr,
540 CAMELLIA_SIGMA1L, CAMELLIA_SIGMA1R, 651 CAMELLIA_SIGMA1L, CAMELLIA_SIGMA1R,
541 w0, w1, il, ir, t0, t1); 652 w0, w1, il, ir, t0, t1);
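
An aside on the GETU32() rework visible in this hunk (nothing below is part of the patch): the old macro built the big-endian word from four byte loads and shifts, while the new one loads through memcpy() and byte-swaps, which current compilers turn into a single load plus bswap and which also avoids unaligned-access pitfalls. Side by side, as stand-alone helpers:

#include <linux/types.h>
#include <linux/string.h>
#include <asm/byteorder.h>

/* Old style: four byte loads, three shifts, three xors. */
static inline u32 getu32_shift(const u8 *pt)
{
	return ((u32)pt[0] << 24) ^ ((u32)pt[1] << 16) ^
	       ((u32)pt[2] << 8)  ^  (u32)pt[3];
}

/* New style: memcpy collapses to a plain load, then one byte swap. */
static inline u32 getu32_memcpy(const u8 *pt)
{
	__be32 v;

	memcpy(&v, pt, 4);
	return be32_to_cpu(v);
}
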
@@ -555,306 +666,108 @@ static void camellia_setup128(const unsigned char *key, u32 *subkey)
555 666
556 /* generate KA dependent subkeys */ 667 /* generate KA dependent subkeys */
557 /* k1, k2 */ 668 /* k1, k2 */
558 SUBL(2) = kll; SUBR(2) = klr; 669 subL[2] = kll; subR[2] = klr;
559 SUBL(3) = krl; SUBR(3) = krr; 670 subL[3] = krl; subR[3] = krr;
560 CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15); 671 ROLDQ(kll, klr, krl, krr, w0, w1, 15);
561 /* k5,k6 */ 672 /* k5,k6 */
562 SUBL(6) = kll; SUBR(6) = klr; 673 subL[6] = kll; subR[6] = klr;
563 SUBL(7) = krl; SUBR(7) = krr; 674 subL[7] = krl; subR[7] = krr;
564 CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15); 675 ROLDQ(kll, klr, krl, krr, w0, w1, 15);
565 /* kl1, kl2 */ 676 /* kl1, kl2 */
566 SUBL(8) = kll; SUBR(8) = klr; 677 subL[8] = kll; subR[8] = klr;
567 SUBL(9) = krl; SUBR(9) = krr; 678 subL[9] = krl; subR[9] = krr;
568 CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15); 679 ROLDQ(kll, klr, krl, krr, w0, w1, 15);
569 /* k9 */ 680 /* k9 */
570 SUBL(12) = kll; SUBR(12) = klr; 681 subL[12] = kll; subR[12] = klr;
571 CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15); 682 ROLDQ(kll, klr, krl, krr, w0, w1, 15);
572 /* k11, k12 */ 683 /* k11, k12 */
573 SUBL(14) = kll; SUBR(14) = klr; 684 subL[14] = kll; subR[14] = klr;
574 SUBL(15) = krl; SUBR(15) = krr; 685 subL[15] = krl; subR[15] = krr;
575 CAMELLIA_ROLDQo32(kll, klr, krl, krr, w0, w1, 34); 686 ROLDQo32(kll, klr, krl, krr, w0, w1, 34);
576 /* k15, k16 */ 687 /* k15, k16 */
577 SUBL(20) = kll; SUBR(20) = klr; 688 subL[20] = kll; subR[20] = klr;
578 SUBL(21) = krl; SUBR(21) = krr; 689 subL[21] = krl; subR[21] = krr;
579 CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 17); 690 ROLDQ(kll, klr, krl, krr, w0, w1, 17);
580 /* kw3, kw4 */ 691 /* kw3, kw4 */
581 SUBL(24) = kll; SUBR(24) = klr; 692 subL[24] = kll; subR[24] = klr;
582 SUBL(25) = krl; SUBR(25) = krr; 693 subL[25] = krl; subR[25] = krr;
583 694
584 695 camellia_setup_tail(subkey, subL, subR, 24);
585 /* absorb kw2 to other subkeys */
586 /* round 2 */
587 SUBL(3) ^= SUBL(1); SUBR(3) ^= SUBR(1);
588 /* round 4 */
589 SUBL(5) ^= SUBL(1); SUBR(5) ^= SUBR(1);
590 /* round 6 */
591 SUBL(7) ^= SUBL(1); SUBR(7) ^= SUBR(1);
592 SUBL(1) ^= SUBR(1) & ~SUBR(9);
593 dw = SUBL(1) & SUBL(9),
594 SUBR(1) ^= CAMELLIA_RL1(dw); /* modified for FLinv(kl2) */
595 /* round 8 */
596 SUBL(11) ^= SUBL(1); SUBR(11) ^= SUBR(1);
597 /* round 10 */
598 SUBL(13) ^= SUBL(1); SUBR(13) ^= SUBR(1);
599 /* round 12 */
600 SUBL(15) ^= SUBL(1); SUBR(15) ^= SUBR(1);
601 SUBL(1) ^= SUBR(1) & ~SUBR(17);
602 dw = SUBL(1) & SUBL(17),
603 SUBR(1) ^= CAMELLIA_RL1(dw); /* modified for FLinv(kl4) */
604 /* round 14 */
605 SUBL(19) ^= SUBL(1); SUBR(19) ^= SUBR(1);
606 /* round 16 */
607 SUBL(21) ^= SUBL(1); SUBR(21) ^= SUBR(1);
608 /* round 18 */
609 SUBL(23) ^= SUBL(1); SUBR(23) ^= SUBR(1);
610 /* kw3 */
611 SUBL(24) ^= SUBL(1); SUBR(24) ^= SUBR(1);
612
613 /* absorb kw4 to other subkeys */
614 kw4l = SUBL(25); kw4r = SUBR(25);
615 /* round 17 */
616 SUBL(22) ^= kw4l; SUBR(22) ^= kw4r;
617 /* round 15 */
618 SUBL(20) ^= kw4l; SUBR(20) ^= kw4r;
619 /* round 13 */
620 SUBL(18) ^= kw4l; SUBR(18) ^= kw4r;
621 kw4l ^= kw4r & ~SUBR(16);
622 dw = kw4l & SUBL(16),
623 kw4r ^= CAMELLIA_RL1(dw); /* modified for FL(kl3) */
624 /* round 11 */
625 SUBL(14) ^= kw4l; SUBR(14) ^= kw4r;
626 /* round 9 */
627 SUBL(12) ^= kw4l; SUBR(12) ^= kw4r;
628 /* round 7 */
629 SUBL(10) ^= kw4l; SUBR(10) ^= kw4r;
630 kw4l ^= kw4r & ~SUBR(8);
631 dw = kw4l & SUBL(8),
632 kw4r ^= CAMELLIA_RL1(dw); /* modified for FL(kl1) */
633 /* round 5 */
634 SUBL(6) ^= kw4l; SUBR(6) ^= kw4r;
635 /* round 3 */
636 SUBL(4) ^= kw4l; SUBR(4) ^= kw4r;
637 /* round 1 */
638 SUBL(2) ^= kw4l; SUBR(2) ^= kw4r;
639 /* kw1 */
640 SUBL(0) ^= kw4l; SUBR(0) ^= kw4r;
641
642
643 /* key XOR is end of F-function */
644 CAMELLIA_SUBKEY_L(0) = SUBL(0) ^ SUBL(2);/* kw1 */
645 CAMELLIA_SUBKEY_R(0) = SUBR(0) ^ SUBR(2);
646 CAMELLIA_SUBKEY_L(2) = SUBL(3); /* round 1 */
647 CAMELLIA_SUBKEY_R(2) = SUBR(3);
648 CAMELLIA_SUBKEY_L(3) = SUBL(2) ^ SUBL(4); /* round 2 */
649 CAMELLIA_SUBKEY_R(3) = SUBR(2) ^ SUBR(4);
650 CAMELLIA_SUBKEY_L(4) = SUBL(3) ^ SUBL(5); /* round 3 */
651 CAMELLIA_SUBKEY_R(4) = SUBR(3) ^ SUBR(5);
652 CAMELLIA_SUBKEY_L(5) = SUBL(4) ^ SUBL(6); /* round 4 */
653 CAMELLIA_SUBKEY_R(5) = SUBR(4) ^ SUBR(6);
654 CAMELLIA_SUBKEY_L(6) = SUBL(5) ^ SUBL(7); /* round 5 */
655 CAMELLIA_SUBKEY_R(6) = SUBR(5) ^ SUBR(7);
656 tl = SUBL(10) ^ (SUBR(10) & ~SUBR(8));
657 dw = tl & SUBL(8), /* FL(kl1) */
658 tr = SUBR(10) ^ CAMELLIA_RL1(dw);
659 CAMELLIA_SUBKEY_L(7) = SUBL(6) ^ tl; /* round 6 */
660 CAMELLIA_SUBKEY_R(7) = SUBR(6) ^ tr;
661 CAMELLIA_SUBKEY_L(8) = SUBL(8); /* FL(kl1) */
662 CAMELLIA_SUBKEY_R(8) = SUBR(8);
663 CAMELLIA_SUBKEY_L(9) = SUBL(9); /* FLinv(kl2) */
664 CAMELLIA_SUBKEY_R(9) = SUBR(9);
665 tl = SUBL(7) ^ (SUBR(7) & ~SUBR(9));
666 dw = tl & SUBL(9), /* FLinv(kl2) */
667 tr = SUBR(7) ^ CAMELLIA_RL1(dw);
668 CAMELLIA_SUBKEY_L(10) = tl ^ SUBL(11); /* round 7 */
669 CAMELLIA_SUBKEY_R(10) = tr ^ SUBR(11);
670 CAMELLIA_SUBKEY_L(11) = SUBL(10) ^ SUBL(12); /* round 8 */
671 CAMELLIA_SUBKEY_R(11) = SUBR(10) ^ SUBR(12);
672 CAMELLIA_SUBKEY_L(12) = SUBL(11) ^ SUBL(13); /* round 9 */
673 CAMELLIA_SUBKEY_R(12) = SUBR(11) ^ SUBR(13);
674 CAMELLIA_SUBKEY_L(13) = SUBL(12) ^ SUBL(14); /* round 10 */
675 CAMELLIA_SUBKEY_R(13) = SUBR(12) ^ SUBR(14);
676 CAMELLIA_SUBKEY_L(14) = SUBL(13) ^ SUBL(15); /* round 11 */
677 CAMELLIA_SUBKEY_R(14) = SUBR(13) ^ SUBR(15);
678 tl = SUBL(18) ^ (SUBR(18) & ~SUBR(16));
679 dw = tl & SUBL(16), /* FL(kl3) */
680 tr = SUBR(18) ^ CAMELLIA_RL1(dw);
681 CAMELLIA_SUBKEY_L(15) = SUBL(14) ^ tl; /* round 12 */
682 CAMELLIA_SUBKEY_R(15) = SUBR(14) ^ tr;
683 CAMELLIA_SUBKEY_L(16) = SUBL(16); /* FL(kl3) */
684 CAMELLIA_SUBKEY_R(16) = SUBR(16);
685 CAMELLIA_SUBKEY_L(17) = SUBL(17); /* FLinv(kl4) */
686 CAMELLIA_SUBKEY_R(17) = SUBR(17);
687 tl = SUBL(15) ^ (SUBR(15) & ~SUBR(17));
688 dw = tl & SUBL(17), /* FLinv(kl4) */
689 tr = SUBR(15) ^ CAMELLIA_RL1(dw);
690 CAMELLIA_SUBKEY_L(18) = tl ^ SUBL(19); /* round 13 */
691 CAMELLIA_SUBKEY_R(18) = tr ^ SUBR(19);
692 CAMELLIA_SUBKEY_L(19) = SUBL(18) ^ SUBL(20); /* round 14 */
693 CAMELLIA_SUBKEY_R(19) = SUBR(18) ^ SUBR(20);
694 CAMELLIA_SUBKEY_L(20) = SUBL(19) ^ SUBL(21); /* round 15 */
695 CAMELLIA_SUBKEY_R(20) = SUBR(19) ^ SUBR(21);
696 CAMELLIA_SUBKEY_L(21) = SUBL(20) ^ SUBL(22); /* round 16 */
697 CAMELLIA_SUBKEY_R(21) = SUBR(20) ^ SUBR(22);
698 CAMELLIA_SUBKEY_L(22) = SUBL(21) ^ SUBL(23); /* round 17 */
699 CAMELLIA_SUBKEY_R(22) = SUBR(21) ^ SUBR(23);
700 CAMELLIA_SUBKEY_L(23) = SUBL(22); /* round 18 */
701 CAMELLIA_SUBKEY_R(23) = SUBR(22);
702 CAMELLIA_SUBKEY_L(24) = SUBL(24) ^ SUBL(23); /* kw3 */
703 CAMELLIA_SUBKEY_R(24) = SUBR(24) ^ SUBR(23);
704
705 /* apply the inverse of the last half of P-function */
706 dw = CAMELLIA_SUBKEY_L(2) ^ CAMELLIA_SUBKEY_R(2),
707 dw = CAMELLIA_RL8(dw);/* round 1 */
708 CAMELLIA_SUBKEY_R(2) = CAMELLIA_SUBKEY_L(2) ^ dw,
709 CAMELLIA_SUBKEY_L(2) = dw;
710 dw = CAMELLIA_SUBKEY_L(3) ^ CAMELLIA_SUBKEY_R(3),
711 dw = CAMELLIA_RL8(dw);/* round 2 */
712 CAMELLIA_SUBKEY_R(3) = CAMELLIA_SUBKEY_L(3) ^ dw,
713 CAMELLIA_SUBKEY_L(3) = dw;
714 dw = CAMELLIA_SUBKEY_L(4) ^ CAMELLIA_SUBKEY_R(4),
715 dw = CAMELLIA_RL8(dw);/* round 3 */
716 CAMELLIA_SUBKEY_R(4) = CAMELLIA_SUBKEY_L(4) ^ dw,
717 CAMELLIA_SUBKEY_L(4) = dw;
718 dw = CAMELLIA_SUBKEY_L(5) ^ CAMELLIA_SUBKEY_R(5),
719 dw = CAMELLIA_RL8(dw);/* round 4 */
720 CAMELLIA_SUBKEY_R(5) = CAMELLIA_SUBKEY_L(5) ^ dw,
721 CAMELLIA_SUBKEY_L(5) = dw;
722 dw = CAMELLIA_SUBKEY_L(6) ^ CAMELLIA_SUBKEY_R(6),
723 dw = CAMELLIA_RL8(dw);/* round 5 */
724 CAMELLIA_SUBKEY_R(6) = CAMELLIA_SUBKEY_L(6) ^ dw,
725 CAMELLIA_SUBKEY_L(6) = dw;
726 dw = CAMELLIA_SUBKEY_L(7) ^ CAMELLIA_SUBKEY_R(7),
727 dw = CAMELLIA_RL8(dw);/* round 6 */
728 CAMELLIA_SUBKEY_R(7) = CAMELLIA_SUBKEY_L(7) ^ dw,
729 CAMELLIA_SUBKEY_L(7) = dw;
730 dw = CAMELLIA_SUBKEY_L(10) ^ CAMELLIA_SUBKEY_R(10),
731 dw = CAMELLIA_RL8(dw);/* round 7 */
732 CAMELLIA_SUBKEY_R(10) = CAMELLIA_SUBKEY_L(10) ^ dw,
733 CAMELLIA_SUBKEY_L(10) = dw;
734 dw = CAMELLIA_SUBKEY_L(11) ^ CAMELLIA_SUBKEY_R(11),
735 dw = CAMELLIA_RL8(dw);/* round 8 */
736 CAMELLIA_SUBKEY_R(11) = CAMELLIA_SUBKEY_L(11) ^ dw,
737 CAMELLIA_SUBKEY_L(11) = dw;
738 dw = CAMELLIA_SUBKEY_L(12) ^ CAMELLIA_SUBKEY_R(12),
739 dw = CAMELLIA_RL8(dw);/* round 9 */
740 CAMELLIA_SUBKEY_R(12) = CAMELLIA_SUBKEY_L(12) ^ dw,
741 CAMELLIA_SUBKEY_L(12) = dw;
742 dw = CAMELLIA_SUBKEY_L(13) ^ CAMELLIA_SUBKEY_R(13),
743 dw = CAMELLIA_RL8(dw);/* round 10 */
744 CAMELLIA_SUBKEY_R(13) = CAMELLIA_SUBKEY_L(13) ^ dw,
745 CAMELLIA_SUBKEY_L(13) = dw;
746 dw = CAMELLIA_SUBKEY_L(14) ^ CAMELLIA_SUBKEY_R(14),
747 dw = CAMELLIA_RL8(dw);/* round 11 */
748 CAMELLIA_SUBKEY_R(14) = CAMELLIA_SUBKEY_L(14) ^ dw,
749 CAMELLIA_SUBKEY_L(14) = dw;
750 dw = CAMELLIA_SUBKEY_L(15) ^ CAMELLIA_SUBKEY_R(15),
751 dw = CAMELLIA_RL8(dw);/* round 12 */
752 CAMELLIA_SUBKEY_R(15) = CAMELLIA_SUBKEY_L(15) ^ dw,
753 CAMELLIA_SUBKEY_L(15) = dw;
754 dw = CAMELLIA_SUBKEY_L(18) ^ CAMELLIA_SUBKEY_R(18),
755 dw = CAMELLIA_RL8(dw);/* round 13 */
756 CAMELLIA_SUBKEY_R(18) = CAMELLIA_SUBKEY_L(18) ^ dw,
757 CAMELLIA_SUBKEY_L(18) = dw;
758 dw = CAMELLIA_SUBKEY_L(19) ^ CAMELLIA_SUBKEY_R(19),
759 dw = CAMELLIA_RL8(dw);/* round 14 */
760 CAMELLIA_SUBKEY_R(19) = CAMELLIA_SUBKEY_L(19) ^ dw,
761 CAMELLIA_SUBKEY_L(19) = dw;
762 dw = CAMELLIA_SUBKEY_L(20) ^ CAMELLIA_SUBKEY_R(20),
763 dw = CAMELLIA_RL8(dw);/* round 15 */
764 CAMELLIA_SUBKEY_R(20) = CAMELLIA_SUBKEY_L(20) ^ dw,
765 CAMELLIA_SUBKEY_L(20) = dw;
766 dw = CAMELLIA_SUBKEY_L(21) ^ CAMELLIA_SUBKEY_R(21),
767 dw = CAMELLIA_RL8(dw);/* round 16 */
768 CAMELLIA_SUBKEY_R(21) = CAMELLIA_SUBKEY_L(21) ^ dw,
769 CAMELLIA_SUBKEY_L(21) = dw;
770 dw = CAMELLIA_SUBKEY_L(22) ^ CAMELLIA_SUBKEY_R(22),
771 dw = CAMELLIA_RL8(dw);/* round 17 */
772 CAMELLIA_SUBKEY_R(22) = CAMELLIA_SUBKEY_L(22) ^ dw,
773 CAMELLIA_SUBKEY_L(22) = dw;
774 dw = CAMELLIA_SUBKEY_L(23) ^ CAMELLIA_SUBKEY_R(23),
775 dw = CAMELLIA_RL8(dw);/* round 18 */
776 CAMELLIA_SUBKEY_R(23) = CAMELLIA_SUBKEY_L(23) ^ dw,
777 CAMELLIA_SUBKEY_L(23) = dw;
778
779 return;
780} 696}
781 697
782
783static void camellia_setup256(const unsigned char *key, u32 *subkey) 698static void camellia_setup256(const unsigned char *key, u32 *subkey)
784{ 699{
785 u32 kll,klr,krl,krr; /* left half of key */ 700 u32 kll, klr, krl, krr; /* left half of key */
786 u32 krll,krlr,krrl,krrr; /* right half of key */ 701 u32 krll, krlr, krrl, krrr; /* right half of key */
787 u32 il, ir, t0, t1, w0, w1; /* temporary variables */ 702 u32 il, ir, t0, t1, w0, w1; /* temporary variables */
788 u32 kw4l, kw4r, dw, tl, tr;
789 u32 subL[34]; 703 u32 subL[34];
790 u32 subR[34]; 704 u32 subR[34];
791 705
792 /** 706 /**
793 * key = (kll || klr || krl || krr || krll || krlr || krrl || krrr) 707 * key = (kll || klr || krl || krr || krll || krlr || krrl || krrr)
794 * (|| is concatination) 708 * (|| is concatenation)
795 */ 709 */
796 710 GETU32(kll, key );
797 kll = GETU32(key ); 711 GETU32(klr, key + 4);
798 klr = GETU32(key + 4); 712 GETU32(krl, key + 8);
799 krl = GETU32(key + 8); 713 GETU32(krr, key + 12);
800 krr = GETU32(key + 12); 714 GETU32(krll, key + 16);
801 krll = GETU32(key + 16); 715 GETU32(krlr, key + 20);
802 krlr = GETU32(key + 20); 716 GETU32(krrl, key + 24);
803 krrl = GETU32(key + 24); 717 GETU32(krrr, key + 28);
804 krrr = GETU32(key + 28);
805 718
806 /* generate KL dependent subkeys */ 719 /* generate KL dependent subkeys */
807 /* kw1 */ 720 /* kw1 */
808 SUBL(0) = kll; SUBR(0) = klr; 721 subL[0] = kll; subR[0] = klr;
809 /* kw2 */ 722 /* kw2 */
810 SUBL(1) = krl; SUBR(1) = krr; 723 subL[1] = krl; subR[1] = krr;
811 CAMELLIA_ROLDQo32(kll, klr, krl, krr, w0, w1, 45); 724 ROLDQo32(kll, klr, krl, krr, w0, w1, 45);
812 /* k9 */ 725 /* k9 */
813 SUBL(12) = kll; SUBR(12) = klr; 726 subL[12] = kll; subR[12] = klr;
814 /* k10 */ 727 /* k10 */
815 SUBL(13) = krl; SUBR(13) = krr; 728 subL[13] = krl; subR[13] = krr;
816 CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15); 729 ROLDQ(kll, klr, krl, krr, w0, w1, 15);
817 /* kl3 */ 730 /* kl3 */
818 SUBL(16) = kll; SUBR(16) = klr; 731 subL[16] = kll; subR[16] = klr;
819 /* kl4 */ 732 /* kl4 */
820 SUBL(17) = krl; SUBR(17) = krr; 733 subL[17] = krl; subR[17] = krr;
821 CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 17); 734 ROLDQ(kll, klr, krl, krr, w0, w1, 17);
822 /* k17 */ 735 /* k17 */
823 SUBL(22) = kll; SUBR(22) = klr; 736 subL[22] = kll; subR[22] = klr;
824 /* k18 */ 737 /* k18 */
825 SUBL(23) = krl; SUBR(23) = krr; 738 subL[23] = krl; subR[23] = krr;
826 CAMELLIA_ROLDQo32(kll, klr, krl, krr, w0, w1, 34); 739 ROLDQo32(kll, klr, krl, krr, w0, w1, 34);
827 /* k23 */ 740 /* k23 */
828 SUBL(30) = kll; SUBR(30) = klr; 741 subL[30] = kll; subR[30] = klr;
829 /* k24 */ 742 /* k24 */
830 SUBL(31) = krl; SUBR(31) = krr; 743 subL[31] = krl; subR[31] = krr;
831 744
832 /* generate KR dependent subkeys */ 745 /* generate KR dependent subkeys */
833 CAMELLIA_ROLDQ(krll, krlr, krrl, krrr, w0, w1, 15); 746 ROLDQ(krll, krlr, krrl, krrr, w0, w1, 15);
834 /* k3 */ 747 /* k3 */
835 SUBL(4) = krll; SUBR(4) = krlr; 748 subL[4] = krll; subR[4] = krlr;
836 /* k4 */ 749 /* k4 */
837 SUBL(5) = krrl; SUBR(5) = krrr; 750 subL[5] = krrl; subR[5] = krrr;
838 CAMELLIA_ROLDQ(krll, krlr, krrl, krrr, w0, w1, 15); 751 ROLDQ(krll, krlr, krrl, krrr, w0, w1, 15);
839 /* kl1 */ 752 /* kl1 */
840 SUBL(8) = krll; SUBR(8) = krlr; 753 subL[8] = krll; subR[8] = krlr;
841 /* kl2 */ 754 /* kl2 */
842 SUBL(9) = krrl; SUBR(9) = krrr; 755 subL[9] = krrl; subR[9] = krrr;
843 CAMELLIA_ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30); 756 ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30);
844 /* k13 */ 757 /* k13 */
845 SUBL(18) = krll; SUBR(18) = krlr; 758 subL[18] = krll; subR[18] = krlr;
846 /* k14 */ 759 /* k14 */
847 SUBL(19) = krrl; SUBR(19) = krrr; 760 subL[19] = krrl; subR[19] = krrr;
848 CAMELLIA_ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 34); 761 ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 34);
849 /* k19 */ 762 /* k19 */
850 SUBL(26) = krll; SUBR(26) = krlr; 763 subL[26] = krll; subR[26] = krlr;
851 /* k20 */ 764 /* k20 */
852 SUBL(27) = krrl; SUBR(27) = krrr; 765 subL[27] = krrl; subR[27] = krrr;
853 CAMELLIA_ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 34); 766 ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 34);
854 767
855 /* generate KA */ 768 /* generate KA */
856 kll = SUBL(0) ^ krll; klr = SUBR(0) ^ krlr; 769 kll = subL[0] ^ krll; klr = subR[0] ^ krlr;
857 krl = SUBL(1) ^ krrl; krr = SUBR(1) ^ krrr; 770 krl = subL[1] ^ krrl; krr = subR[1] ^ krrr;
858 CAMELLIA_F(kll, klr, 771 CAMELLIA_F(kll, klr,
859 CAMELLIA_SIGMA1L, CAMELLIA_SIGMA1R, 772 CAMELLIA_SIGMA1L, CAMELLIA_SIGMA1R,
860 w0, w1, il, ir, t0, t1); 773 w0, w1, il, ir, t0, t1);
@@ -885,310 +798,50 @@ static void camellia_setup256(const unsigned char *key, u32 *subkey)
885 krll ^= w0; krlr ^= w1; 798 krll ^= w0; krlr ^= w1;
886 799
887 /* generate KA dependent subkeys */ 800 /* generate KA dependent subkeys */
888 CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 15); 801 ROLDQ(kll, klr, krl, krr, w0, w1, 15);
889 /* k5 */ 802 /* k5 */
890 SUBL(6) = kll; SUBR(6) = klr; 803 subL[6] = kll; subR[6] = klr;
891 /* k6 */ 804 /* k6 */
892 SUBL(7) = krl; SUBR(7) = krr; 805 subL[7] = krl; subR[7] = krr;
893 CAMELLIA_ROLDQ(kll, klr, krl, krr, w0, w1, 30); 806 ROLDQ(kll, klr, krl, krr, w0, w1, 30);
894 /* k11 */ 807 /* k11 */
895 SUBL(14) = kll; SUBR(14) = klr; 808 subL[14] = kll; subR[14] = klr;
896 /* k12 */ 809 /* k12 */
897 SUBL(15) = krl; SUBR(15) = krr; 810 subL[15] = krl; subR[15] = krr;
898 /* rotation left shift 32bit */ 811 /* rotation left shift 32bit */
899 /* kl5 */ 812 /* kl5 */
900 SUBL(24) = klr; SUBR(24) = krl; 813 subL[24] = klr; subR[24] = krl;
901 /* kl6 */ 814 /* kl6 */
902 SUBL(25) = krr; SUBR(25) = kll; 815 subL[25] = krr; subR[25] = kll;
903 /* rotation left shift 49 from k11,k12 -> k21,k22 */ 816 /* rotation left shift 49 from k11,k12 -> k21,k22 */
904 CAMELLIA_ROLDQo32(kll, klr, krl, krr, w0, w1, 49); 817 ROLDQo32(kll, klr, krl, krr, w0, w1, 49);
905 /* k21 */ 818 /* k21 */
906 SUBL(28) = kll; SUBR(28) = klr; 819 subL[28] = kll; subR[28] = klr;
907 /* k22 */ 820 /* k22 */
908 SUBL(29) = krl; SUBR(29) = krr; 821 subL[29] = krl; subR[29] = krr;
909 822
910 /* generate KB dependent subkeys */ 823 /* generate KB dependent subkeys */
911 /* k1 */ 824 /* k1 */
912 SUBL(2) = krll; SUBR(2) = krlr; 825 subL[2] = krll; subR[2] = krlr;
913 /* k2 */ 826 /* k2 */
914 SUBL(3) = krrl; SUBR(3) = krrr; 827 subL[3] = krrl; subR[3] = krrr;
915 CAMELLIA_ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30); 828 ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30);
916 /* k7 */ 829 /* k7 */
917 SUBL(10) = krll; SUBR(10) = krlr; 830 subL[10] = krll; subR[10] = krlr;
918 /* k8 */ 831 /* k8 */
919 SUBL(11) = krrl; SUBR(11) = krrr; 832 subL[11] = krrl; subR[11] = krrr;
920 CAMELLIA_ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30); 833 ROLDQ(krll, krlr, krrl, krrr, w0, w1, 30);
921 /* k15 */ 834 /* k15 */
922 SUBL(20) = krll; SUBR(20) = krlr; 835 subL[20] = krll; subR[20] = krlr;
923 /* k16 */ 836 /* k16 */
924 SUBL(21) = krrl; SUBR(21) = krrr; 837 subL[21] = krrl; subR[21] = krrr;
925 CAMELLIA_ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 51); 838 ROLDQo32(krll, krlr, krrl, krrr, w0, w1, 51);
926 /* kw3 */ 839 /* kw3 */
927 SUBL(32) = krll; SUBR(32) = krlr; 840 subL[32] = krll; subR[32] = krlr;
928 /* kw4 */ 841 /* kw4 */
929 SUBL(33) = krrl; SUBR(33) = krrr; 842 subL[33] = krrl; subR[33] = krrr;
930
931 /* absorb kw2 to other subkeys */
932 /* round 2 */
933 SUBL(3) ^= SUBL(1); SUBR(3) ^= SUBR(1);
934 /* round 4 */
935 SUBL(5) ^= SUBL(1); SUBR(5) ^= SUBR(1);
936 /* round 6 */
937 SUBL(7) ^= SUBL(1); SUBR(7) ^= SUBR(1);
938 SUBL(1) ^= SUBR(1) & ~SUBR(9);
939 dw = SUBL(1) & SUBL(9),
940 SUBR(1) ^= CAMELLIA_RL1(dw); /* modified for FLinv(kl2) */
941 /* round 8 */
942 SUBL(11) ^= SUBL(1); SUBR(11) ^= SUBR(1);
943 /* round 10 */
944 SUBL(13) ^= SUBL(1); SUBR(13) ^= SUBR(1);
945 /* round 12 */
946 SUBL(15) ^= SUBL(1); SUBR(15) ^= SUBR(1);
947 SUBL(1) ^= SUBR(1) & ~SUBR(17);
948 dw = SUBL(1) & SUBL(17),
949 SUBR(1) ^= CAMELLIA_RL1(dw); /* modified for FLinv(kl4) */
950 /* round 14 */
951 SUBL(19) ^= SUBL(1); SUBR(19) ^= SUBR(1);
952 /* round 16 */
953 SUBL(21) ^= SUBL(1); SUBR(21) ^= SUBR(1);
954 /* round 18 */
955 SUBL(23) ^= SUBL(1); SUBR(23) ^= SUBR(1);
956 SUBL(1) ^= SUBR(1) & ~SUBR(25);
957 dw = SUBL(1) & SUBL(25),
958 SUBR(1) ^= CAMELLIA_RL1(dw); /* modified for FLinv(kl6) */
959 /* round 20 */
960 SUBL(27) ^= SUBL(1); SUBR(27) ^= SUBR(1);
961 /* round 22 */
962 SUBL(29) ^= SUBL(1); SUBR(29) ^= SUBR(1);
963 /* round 24 */
964 SUBL(31) ^= SUBL(1); SUBR(31) ^= SUBR(1);
965 /* kw3 */
966 SUBL(32) ^= SUBL(1); SUBR(32) ^= SUBR(1);
967
968
969 /* absorb kw4 to other subkeys */
970 kw4l = SUBL(33); kw4r = SUBR(33);
971 /* round 23 */
972 SUBL(30) ^= kw4l; SUBR(30) ^= kw4r;
973 /* round 21 */
974 SUBL(28) ^= kw4l; SUBR(28) ^= kw4r;
975 /* round 19 */
976 SUBL(26) ^= kw4l; SUBR(26) ^= kw4r;
977 kw4l ^= kw4r & ~SUBR(24);
978 dw = kw4l & SUBL(24),
979 kw4r ^= CAMELLIA_RL1(dw); /* modified for FL(kl5) */
980 /* round 17 */
981 SUBL(22) ^= kw4l; SUBR(22) ^= kw4r;
982 /* round 15 */
983 SUBL(20) ^= kw4l; SUBR(20) ^= kw4r;
984 /* round 13 */
985 SUBL(18) ^= kw4l; SUBR(18) ^= kw4r;
986 kw4l ^= kw4r & ~SUBR(16);
987 dw = kw4l & SUBL(16),
988 kw4r ^= CAMELLIA_RL1(dw); /* modified for FL(kl3) */
989 /* round 11 */
990 SUBL(14) ^= kw4l; SUBR(14) ^= kw4r;
991 /* round 9 */
992 SUBL(12) ^= kw4l; SUBR(12) ^= kw4r;
993 /* round 7 */
994 SUBL(10) ^= kw4l; SUBR(10) ^= kw4r;
995 kw4l ^= kw4r & ~SUBR(8);
996 dw = kw4l & SUBL(8),
997 kw4r ^= CAMELLIA_RL1(dw); /* modified for FL(kl1) */
998 /* round 5 */
999 SUBL(6) ^= kw4l; SUBR(6) ^= kw4r;
1000 /* round 3 */
1001 SUBL(4) ^= kw4l; SUBR(4) ^= kw4r;
1002 /* round 1 */
1003 SUBL(2) ^= kw4l; SUBR(2) ^= kw4r;
1004 /* kw1 */
1005 SUBL(0) ^= kw4l; SUBR(0) ^= kw4r;
1006 843
1007 /* key XOR is end of F-function */ 844 camellia_setup_tail(subkey, subL, subR, 32);
1008 CAMELLIA_SUBKEY_L(0) = SUBL(0) ^ SUBL(2);/* kw1 */
1009 CAMELLIA_SUBKEY_R(0) = SUBR(0) ^ SUBR(2);
1010 CAMELLIA_SUBKEY_L(2) = SUBL(3); /* round 1 */
1011 CAMELLIA_SUBKEY_R(2) = SUBR(3);
1012 CAMELLIA_SUBKEY_L(3) = SUBL(2) ^ SUBL(4); /* round 2 */
1013 CAMELLIA_SUBKEY_R(3) = SUBR(2) ^ SUBR(4);
1014 CAMELLIA_SUBKEY_L(4) = SUBL(3) ^ SUBL(5); /* round 3 */
1015 CAMELLIA_SUBKEY_R(4) = SUBR(3) ^ SUBR(5);
1016 CAMELLIA_SUBKEY_L(5) = SUBL(4) ^ SUBL(6); /* round 4 */
1017 CAMELLIA_SUBKEY_R(5) = SUBR(4) ^ SUBR(6);
1018 CAMELLIA_SUBKEY_L(6) = SUBL(5) ^ SUBL(7); /* round 5 */
1019 CAMELLIA_SUBKEY_R(6) = SUBR(5) ^ SUBR(7);
1020 tl = SUBL(10) ^ (SUBR(10) & ~SUBR(8));
1021 dw = tl & SUBL(8), /* FL(kl1) */
1022 tr = SUBR(10) ^ CAMELLIA_RL1(dw);
1023 CAMELLIA_SUBKEY_L(7) = SUBL(6) ^ tl; /* round 6 */
1024 CAMELLIA_SUBKEY_R(7) = SUBR(6) ^ tr;
1025 CAMELLIA_SUBKEY_L(8) = SUBL(8); /* FL(kl1) */
1026 CAMELLIA_SUBKEY_R(8) = SUBR(8);
1027 CAMELLIA_SUBKEY_L(9) = SUBL(9); /* FLinv(kl2) */
1028 CAMELLIA_SUBKEY_R(9) = SUBR(9);
1029 tl = SUBL(7) ^ (SUBR(7) & ~SUBR(9));
1030 dw = tl & SUBL(9), /* FLinv(kl2) */
1031 tr = SUBR(7) ^ CAMELLIA_RL1(dw);
1032 CAMELLIA_SUBKEY_L(10) = tl ^ SUBL(11); /* round 7 */
1033 CAMELLIA_SUBKEY_R(10) = tr ^ SUBR(11);
1034 CAMELLIA_SUBKEY_L(11) = SUBL(10) ^ SUBL(12); /* round 8 */
1035 CAMELLIA_SUBKEY_R(11) = SUBR(10) ^ SUBR(12);
1036 CAMELLIA_SUBKEY_L(12) = SUBL(11) ^ SUBL(13); /* round 9 */
1037 CAMELLIA_SUBKEY_R(12) = SUBR(11) ^ SUBR(13);
1038 CAMELLIA_SUBKEY_L(13) = SUBL(12) ^ SUBL(14); /* round 10 */
1039 CAMELLIA_SUBKEY_R(13) = SUBR(12) ^ SUBR(14);
1040 CAMELLIA_SUBKEY_L(14) = SUBL(13) ^ SUBL(15); /* round 11 */
1041 CAMELLIA_SUBKEY_R(14) = SUBR(13) ^ SUBR(15);
1042 tl = SUBL(18) ^ (SUBR(18) & ~SUBR(16));
1043 dw = tl & SUBL(16), /* FL(kl3) */
1044 tr = SUBR(18) ^ CAMELLIA_RL1(dw);
1045 CAMELLIA_SUBKEY_L(15) = SUBL(14) ^ tl; /* round 12 */
1046 CAMELLIA_SUBKEY_R(15) = SUBR(14) ^ tr;
1047 CAMELLIA_SUBKEY_L(16) = SUBL(16); /* FL(kl3) */
1048 CAMELLIA_SUBKEY_R(16) = SUBR(16);
1049 CAMELLIA_SUBKEY_L(17) = SUBL(17); /* FLinv(kl4) */
1050 CAMELLIA_SUBKEY_R(17) = SUBR(17);
1051 tl = SUBL(15) ^ (SUBR(15) & ~SUBR(17));
1052 dw = tl & SUBL(17), /* FLinv(kl4) */
1053 tr = SUBR(15) ^ CAMELLIA_RL1(dw);
1054 CAMELLIA_SUBKEY_L(18) = tl ^ SUBL(19); /* round 13 */
1055 CAMELLIA_SUBKEY_R(18) = tr ^ SUBR(19);
1056 CAMELLIA_SUBKEY_L(19) = SUBL(18) ^ SUBL(20); /* round 14 */
1057 CAMELLIA_SUBKEY_R(19) = SUBR(18) ^ SUBR(20);
1058 CAMELLIA_SUBKEY_L(20) = SUBL(19) ^ SUBL(21); /* round 15 */
1059 CAMELLIA_SUBKEY_R(20) = SUBR(19) ^ SUBR(21);
1060 CAMELLIA_SUBKEY_L(21) = SUBL(20) ^ SUBL(22); /* round 16 */
1061 CAMELLIA_SUBKEY_R(21) = SUBR(20) ^ SUBR(22);
1062 CAMELLIA_SUBKEY_L(22) = SUBL(21) ^ SUBL(23); /* round 17 */
1063 CAMELLIA_SUBKEY_R(22) = SUBR(21) ^ SUBR(23);
1064 tl = SUBL(26) ^ (SUBR(26)
1065 & ~SUBR(24));
1066 dw = tl & SUBL(24), /* FL(kl5) */
1067 tr = SUBR(26) ^ CAMELLIA_RL1(dw);
1068 CAMELLIA_SUBKEY_L(23) = SUBL(22) ^ tl; /* round 18 */
1069 CAMELLIA_SUBKEY_R(23) = SUBR(22) ^ tr;
1070 CAMELLIA_SUBKEY_L(24) = SUBL(24); /* FL(kl5) */
1071 CAMELLIA_SUBKEY_R(24) = SUBR(24);
1072 CAMELLIA_SUBKEY_L(25) = SUBL(25); /* FLinv(kl6) */
1073 CAMELLIA_SUBKEY_R(25) = SUBR(25);
1074 tl = SUBL(23) ^ (SUBR(23) &
1075 ~SUBR(25));
1076 dw = tl & SUBL(25), /* FLinv(kl6) */
1077 tr = SUBR(23) ^ CAMELLIA_RL1(dw);
1078 CAMELLIA_SUBKEY_L(26) = tl ^ SUBL(27); /* round 19 */
1079 CAMELLIA_SUBKEY_R(26) = tr ^ SUBR(27);
1080 CAMELLIA_SUBKEY_L(27) = SUBL(26) ^ SUBL(28); /* round 20 */
1081 CAMELLIA_SUBKEY_R(27) = SUBR(26) ^ SUBR(28);
1082 CAMELLIA_SUBKEY_L(28) = SUBL(27) ^ SUBL(29); /* round 21 */
1083 CAMELLIA_SUBKEY_R(28) = SUBR(27) ^ SUBR(29);
1084 CAMELLIA_SUBKEY_L(29) = SUBL(28) ^ SUBL(30); /* round 22 */
1085 CAMELLIA_SUBKEY_R(29) = SUBR(28) ^ SUBR(30);
1086 CAMELLIA_SUBKEY_L(30) = SUBL(29) ^ SUBL(31); /* round 23 */
1087 CAMELLIA_SUBKEY_R(30) = SUBR(29) ^ SUBR(31);
1088 CAMELLIA_SUBKEY_L(31) = SUBL(30); /* round 24 */
1089 CAMELLIA_SUBKEY_R(31) = SUBR(30);
1090 CAMELLIA_SUBKEY_L(32) = SUBL(32) ^ SUBL(31); /* kw3 */
1091 CAMELLIA_SUBKEY_R(32) = SUBR(32) ^ SUBR(31);
1092
1093 /* apply the inverse of the last half of P-function */
1094 dw = CAMELLIA_SUBKEY_L(2) ^ CAMELLIA_SUBKEY_R(2),
1095 dw = CAMELLIA_RL8(dw);/* round 1 */
1096 CAMELLIA_SUBKEY_R(2) = CAMELLIA_SUBKEY_L(2) ^ dw,
1097 CAMELLIA_SUBKEY_L(2) = dw;
1098 dw = CAMELLIA_SUBKEY_L(3) ^ CAMELLIA_SUBKEY_R(3),
1099 dw = CAMELLIA_RL8(dw);/* round 2 */
1100 CAMELLIA_SUBKEY_R(3) = CAMELLIA_SUBKEY_L(3) ^ dw,
1101 CAMELLIA_SUBKEY_L(3) = dw;
1102 dw = CAMELLIA_SUBKEY_L(4) ^ CAMELLIA_SUBKEY_R(4),
1103 dw = CAMELLIA_RL8(dw);/* round 3 */
1104 CAMELLIA_SUBKEY_R(4) = CAMELLIA_SUBKEY_L(4) ^ dw,
1105 CAMELLIA_SUBKEY_L(4) = dw;
1106 dw = CAMELLIA_SUBKEY_L(5) ^ CAMELLIA_SUBKEY_R(5),
1107 dw = CAMELLIA_RL8(dw);/* round 4 */
1108 CAMELLIA_SUBKEY_R(5) = CAMELLIA_SUBKEY_L(5) ^ dw,
1109 CAMELLIA_SUBKEY_L(5) = dw;
1110 dw = CAMELLIA_SUBKEY_L(6) ^ CAMELLIA_SUBKEY_R(6),
1111 dw = CAMELLIA_RL8(dw);/* round 5 */
1112 CAMELLIA_SUBKEY_R(6) = CAMELLIA_SUBKEY_L(6) ^ dw,
1113 CAMELLIA_SUBKEY_L(6) = dw;
1114 dw = CAMELLIA_SUBKEY_L(7) ^ CAMELLIA_SUBKEY_R(7),
1115 dw = CAMELLIA_RL8(dw);/* round 6 */
1116 CAMELLIA_SUBKEY_R(7) = CAMELLIA_SUBKEY_L(7) ^ dw,
1117 CAMELLIA_SUBKEY_L(7) = dw;
1118 dw = CAMELLIA_SUBKEY_L(10) ^ CAMELLIA_SUBKEY_R(10),
1119 dw = CAMELLIA_RL8(dw);/* round 7 */
1120 CAMELLIA_SUBKEY_R(10) = CAMELLIA_SUBKEY_L(10) ^ dw,
1121 CAMELLIA_SUBKEY_L(10) = dw;
1122 dw = CAMELLIA_SUBKEY_L(11) ^ CAMELLIA_SUBKEY_R(11),
1123 dw = CAMELLIA_RL8(dw);/* round 8 */
1124 CAMELLIA_SUBKEY_R(11) = CAMELLIA_SUBKEY_L(11) ^ dw,
1125 CAMELLIA_SUBKEY_L(11) = dw;
1126 dw = CAMELLIA_SUBKEY_L(12) ^ CAMELLIA_SUBKEY_R(12),
1127 dw = CAMELLIA_RL8(dw);/* round 9 */
1128 CAMELLIA_SUBKEY_R(12) = CAMELLIA_SUBKEY_L(12) ^ dw,
1129 CAMELLIA_SUBKEY_L(12) = dw;
1130 dw = CAMELLIA_SUBKEY_L(13) ^ CAMELLIA_SUBKEY_R(13),
1131 dw = CAMELLIA_RL8(dw);/* round 10 */
1132 CAMELLIA_SUBKEY_R(13) = CAMELLIA_SUBKEY_L(13) ^ dw,
1133 CAMELLIA_SUBKEY_L(13) = dw;
1134 dw = CAMELLIA_SUBKEY_L(14) ^ CAMELLIA_SUBKEY_R(14),
1135 dw = CAMELLIA_RL8(dw);/* round 11 */
1136 CAMELLIA_SUBKEY_R(14) = CAMELLIA_SUBKEY_L(14) ^ dw,
1137 CAMELLIA_SUBKEY_L(14) = dw;
1138 dw = CAMELLIA_SUBKEY_L(15) ^ CAMELLIA_SUBKEY_R(15),
1139 dw = CAMELLIA_RL8(dw);/* round 12 */
1140 CAMELLIA_SUBKEY_R(15) = CAMELLIA_SUBKEY_L(15) ^ dw,
1141 CAMELLIA_SUBKEY_L(15) = dw;
1142 dw = CAMELLIA_SUBKEY_L(18) ^ CAMELLIA_SUBKEY_R(18),
1143 dw = CAMELLIA_RL8(dw);/* round 13 */
1144 CAMELLIA_SUBKEY_R(18) = CAMELLIA_SUBKEY_L(18) ^ dw,
1145 CAMELLIA_SUBKEY_L(18) = dw;
1146 dw = CAMELLIA_SUBKEY_L(19) ^ CAMELLIA_SUBKEY_R(19),
1147 dw = CAMELLIA_RL8(dw);/* round 14 */
1148 CAMELLIA_SUBKEY_R(19) = CAMELLIA_SUBKEY_L(19) ^ dw,
1149 CAMELLIA_SUBKEY_L(19) = dw;
1150 dw = CAMELLIA_SUBKEY_L(20) ^ CAMELLIA_SUBKEY_R(20),
1151 dw = CAMELLIA_RL8(dw);/* round 15 */
1152 CAMELLIA_SUBKEY_R(20) = CAMELLIA_SUBKEY_L(20) ^ dw,
1153 CAMELLIA_SUBKEY_L(20) = dw;
1154 dw = CAMELLIA_SUBKEY_L(21) ^ CAMELLIA_SUBKEY_R(21),
1155 dw = CAMELLIA_RL8(dw);/* round 16 */
1156 CAMELLIA_SUBKEY_R(21) = CAMELLIA_SUBKEY_L(21) ^ dw,
1157 CAMELLIA_SUBKEY_L(21) = dw;
1158 dw = CAMELLIA_SUBKEY_L(22) ^ CAMELLIA_SUBKEY_R(22),
1159 dw = CAMELLIA_RL8(dw);/* round 17 */
1160 CAMELLIA_SUBKEY_R(22) = CAMELLIA_SUBKEY_L(22) ^ dw,
1161 CAMELLIA_SUBKEY_L(22) = dw;
1162 dw = CAMELLIA_SUBKEY_L(23) ^ CAMELLIA_SUBKEY_R(23),
1163 dw = CAMELLIA_RL8(dw);/* round 18 */
1164 CAMELLIA_SUBKEY_R(23) = CAMELLIA_SUBKEY_L(23) ^ dw,
1165 CAMELLIA_SUBKEY_L(23) = dw;
1166 dw = CAMELLIA_SUBKEY_L(26) ^ CAMELLIA_SUBKEY_R(26),
1167 dw = CAMELLIA_RL8(dw);/* round 19 */
1168 CAMELLIA_SUBKEY_R(26) = CAMELLIA_SUBKEY_L(26) ^ dw,
1169 CAMELLIA_SUBKEY_L(26) = dw;
1170 dw = CAMELLIA_SUBKEY_L(27) ^ CAMELLIA_SUBKEY_R(27),
1171 dw = CAMELLIA_RL8(dw);/* round 20 */
1172 CAMELLIA_SUBKEY_R(27) = CAMELLIA_SUBKEY_L(27) ^ dw,
1173 CAMELLIA_SUBKEY_L(27) = dw;
1174 dw = CAMELLIA_SUBKEY_L(28) ^ CAMELLIA_SUBKEY_R(28),
1175 dw = CAMELLIA_RL8(dw);/* round 21 */
1176 CAMELLIA_SUBKEY_R(28) = CAMELLIA_SUBKEY_L(28) ^ dw,
1177 CAMELLIA_SUBKEY_L(28) = dw;
1178 dw = CAMELLIA_SUBKEY_L(29) ^ CAMELLIA_SUBKEY_R(29),
1179 dw = CAMELLIA_RL8(dw);/* round 22 */
1180 CAMELLIA_SUBKEY_R(29) = CAMELLIA_SUBKEY_L(29) ^ dw,
1181 CAMELLIA_SUBKEY_L(29) = dw;
1182 dw = CAMELLIA_SUBKEY_L(30) ^ CAMELLIA_SUBKEY_R(30),
1183 dw = CAMELLIA_RL8(dw);/* round 23 */
1184 CAMELLIA_SUBKEY_R(30) = CAMELLIA_SUBKEY_L(30) ^ dw,
1185 CAMELLIA_SUBKEY_L(30) = dw;
1186 dw = CAMELLIA_SUBKEY_L(31) ^ CAMELLIA_SUBKEY_R(31),
1187 dw = CAMELLIA_RL8(dw);/* round 24 */
1188 CAMELLIA_SUBKEY_R(31) = CAMELLIA_SUBKEY_L(31) ^ dw,
1189 CAMELLIA_SUBKEY_L(31) = dw;
1190
1191 return;
1192} 845}
1193 846
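/*
 * Illustrative sketch (not part of this patch): the ROLDQ/ROLDQo32 helpers
 * used in the key schedule above rotate the 128-bit quantity
 * kll || klr || krl || krr.  For a rotation amount 0 < n < 32 the word-level
 * arithmetic looks like this; the o32 variant covers amounts of 32 bits or
 * more by moving whole words first.
 */
static void rol128(u32 w[4], unsigned int n)
{
        u32 carry = w[0] >> (32 - n);

        w[0] = (w[0] << n) | (w[1] >> (32 - n));
        w[1] = (w[1] << n) | (w[2] >> (32 - n));
        w[2] = (w[2] << n) | (w[3] >> (32 - n));
        w[3] = (w[3] << n) | carry;
}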
1194static void camellia_setup192(const unsigned char *key, u32 *subkey) 847static void camellia_setup192(const unsigned char *key, u32 *subkey)
@@ -1197,482 +850,168 @@ static void camellia_setup192(const unsigned char *key, u32 *subkey)
1197 u32 krll, krlr, krrl,krrr; 850 u32 krll, krlr, krrl,krrr;
1198 851
1199 memcpy(kk, key, 24); 852 memcpy(kk, key, 24);
1200 memcpy((unsigned char *)&krll, key+16,4); 853 memcpy((unsigned char *)&krll, key+16, 4);
1201 memcpy((unsigned char *)&krlr, key+20,4); 854 memcpy((unsigned char *)&krlr, key+20, 4);
1202 krrl = ~krll; 855 krrl = ~krll;
1203 krrr = ~krlr; 856 krrr = ~krlr;
1204 memcpy(kk+24, (unsigned char *)&krrl, 4); 857 memcpy(kk+24, (unsigned char *)&krrl, 4);
1205 memcpy(kk+28, (unsigned char *)&krrr, 4); 858 memcpy(kk+28, (unsigned char *)&krrr, 4);
1206 camellia_setup256(kk, subkey); 859 camellia_setup256(kk, subkey);
1207 return;
1208} 860}
1209 861
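/*
 * Sketch only: camellia_setup192() above reuses the 256-bit schedule by
 * padding the 24-byte key so that the right half of KR is the bitwise
 * complement of its left half (krrl = ~krll, krrr = ~krlr).  Byte for byte
 * that amounts to:
 */
static void camellia_pad_192_key(const unsigned char *key, unsigned char *kk)
{
        int i;

        for (i = 0; i < 24; i++)
                kk[i] = key[i];                 /* copy the 192-bit key */
        for (i = 0; i < 8; i++)
                kk[24 + i] = ~key[16 + i];      /* KR high = ~(KR low) */
}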
1210 862
1211/** 863/*
1212 * Stuff related to camellia encryption/decryption 864 * Encrypt/decrypt
1213 */ 865 */
1214static void camellia_encrypt128(const u32 *subkey, __be32 *io_text) 866#define CAMELLIA_FLS(ll, lr, rl, rr, kll, klr, krl, krr, t0, t1, t2, t3) \
1215{ 867 do { \
1216 u32 il,ir,t0,t1; /* temporary valiables */ 868 t0 = kll; \
1217 869 t2 = krr; \
1218 u32 io[4]; 870 t0 &= ll; \
1219 871 t2 |= rr; \
1220 io[0] = be32_to_cpu(io_text[0]); 872 rl ^= t2; \
1221 io[1] = be32_to_cpu(io_text[1]); 873 lr ^= ROL1(t0); \
1222 io[2] = be32_to_cpu(io_text[2]); 874 t3 = krl; \
1223 io[3] = be32_to_cpu(io_text[3]); 875 t1 = klr; \
1224 876 t3 &= rl; \
1225 /* pre whitening but absorb kw2*/ 877 t1 |= lr; \
1226 io[0] ^= CAMELLIA_SUBKEY_L(0); 878 ll ^= t1; \
1227 io[1] ^= CAMELLIA_SUBKEY_R(0); 879 rr ^= ROL1(t3); \
1228 /* main iteration */ 880 } while(0)
1229
1230 CAMELLIA_ROUNDSM(io[0],io[1],
1231 CAMELLIA_SUBKEY_L(2),CAMELLIA_SUBKEY_R(2),
1232 io[2],io[3],il,ir,t0,t1);
1233 CAMELLIA_ROUNDSM(io[2],io[3],
1234 CAMELLIA_SUBKEY_L(3),CAMELLIA_SUBKEY_R(3),
1235 io[0],io[1],il,ir,t0,t1);
1236 CAMELLIA_ROUNDSM(io[0],io[1],
1237 CAMELLIA_SUBKEY_L(4),CAMELLIA_SUBKEY_R(4),
1238 io[2],io[3],il,ir,t0,t1);
1239 CAMELLIA_ROUNDSM(io[2],io[3],
1240 CAMELLIA_SUBKEY_L(5),CAMELLIA_SUBKEY_R(5),
1241 io[0],io[1],il,ir,t0,t1);
1242 CAMELLIA_ROUNDSM(io[0],io[1],
1243 CAMELLIA_SUBKEY_L(6),CAMELLIA_SUBKEY_R(6),
1244 io[2],io[3],il,ir,t0,t1);
1245 CAMELLIA_ROUNDSM(io[2],io[3],
1246 CAMELLIA_SUBKEY_L(7),CAMELLIA_SUBKEY_R(7),
1247 io[0],io[1],il,ir,t0,t1);
1248
1249 CAMELLIA_FLS(io[0],io[1],io[2],io[3],
1250 CAMELLIA_SUBKEY_L(8),CAMELLIA_SUBKEY_R(8),
1251 CAMELLIA_SUBKEY_L(9),CAMELLIA_SUBKEY_R(9),
1252 t0,t1,il,ir);
1253
1254 CAMELLIA_ROUNDSM(io[0],io[1],
1255 CAMELLIA_SUBKEY_L(10),CAMELLIA_SUBKEY_R(10),
1256 io[2],io[3],il,ir,t0,t1);
1257 CAMELLIA_ROUNDSM(io[2],io[3],
1258 CAMELLIA_SUBKEY_L(11),CAMELLIA_SUBKEY_R(11),
1259 io[0],io[1],il,ir,t0,t1);
1260 CAMELLIA_ROUNDSM(io[0],io[1],
1261 CAMELLIA_SUBKEY_L(12),CAMELLIA_SUBKEY_R(12),
1262 io[2],io[3],il,ir,t0,t1);
1263 CAMELLIA_ROUNDSM(io[2],io[3],
1264 CAMELLIA_SUBKEY_L(13),CAMELLIA_SUBKEY_R(13),
1265 io[0],io[1],il,ir,t0,t1);
1266 CAMELLIA_ROUNDSM(io[0],io[1],
1267 CAMELLIA_SUBKEY_L(14),CAMELLIA_SUBKEY_R(14),
1268 io[2],io[3],il,ir,t0,t1);
1269 CAMELLIA_ROUNDSM(io[2],io[3],
1270 CAMELLIA_SUBKEY_L(15),CAMELLIA_SUBKEY_R(15),
1271 io[0],io[1],il,ir,t0,t1);
1272
1273 CAMELLIA_FLS(io[0],io[1],io[2],io[3],
1274 CAMELLIA_SUBKEY_L(16),CAMELLIA_SUBKEY_R(16),
1275 CAMELLIA_SUBKEY_L(17),CAMELLIA_SUBKEY_R(17),
1276 t0,t1,il,ir);
1277
1278 CAMELLIA_ROUNDSM(io[0],io[1],
1279 CAMELLIA_SUBKEY_L(18),CAMELLIA_SUBKEY_R(18),
1280 io[2],io[3],il,ir,t0,t1);
1281 CAMELLIA_ROUNDSM(io[2],io[3],
1282 CAMELLIA_SUBKEY_L(19),CAMELLIA_SUBKEY_R(19),
1283 io[0],io[1],il,ir,t0,t1);
1284 CAMELLIA_ROUNDSM(io[0],io[1],
1285 CAMELLIA_SUBKEY_L(20),CAMELLIA_SUBKEY_R(20),
1286 io[2],io[3],il,ir,t0,t1);
1287 CAMELLIA_ROUNDSM(io[2],io[3],
1288 CAMELLIA_SUBKEY_L(21),CAMELLIA_SUBKEY_R(21),
1289 io[0],io[1],il,ir,t0,t1);
1290 CAMELLIA_ROUNDSM(io[0],io[1],
1291 CAMELLIA_SUBKEY_L(22),CAMELLIA_SUBKEY_R(22),
1292 io[2],io[3],il,ir,t0,t1);
1293 CAMELLIA_ROUNDSM(io[2],io[3],
1294 CAMELLIA_SUBKEY_L(23),CAMELLIA_SUBKEY_R(23),
1295 io[0],io[1],il,ir,t0,t1);
1296 881
1297 /* post whitening but kw4 */ 882#define CAMELLIA_ROUNDSM(xl, xr, kl, kr, yl, yr, il, ir) \
1298 io[2] ^= CAMELLIA_SUBKEY_L(24); 883 do { \
1299 io[3] ^= CAMELLIA_SUBKEY_R(24); 884 ir = camellia_sp1110[(u8)xr]; \
1300 885 il = camellia_sp1110[ (xl >> 24)]; \
1301 t0 = io[0]; 886 ir ^= camellia_sp0222[ (xr >> 24)]; \
1302 t1 = io[1]; 887 il ^= camellia_sp0222[(u8)(xl >> 16)]; \
1303 io[0] = io[2]; 888 ir ^= camellia_sp3033[(u8)(xr >> 16)]; \
1304 io[1] = io[3]; 889 il ^= camellia_sp3033[(u8)(xl >> 8)]; \
1305 io[2] = t0; 890 ir ^= camellia_sp4404[(u8)(xr >> 8)]; \
1306 io[3] = t1; 891 il ^= camellia_sp4404[(u8)xl]; \
1307 892 il ^= kl; \
1308 io_text[0] = cpu_to_be32(io[0]); 893 ir ^= il ^ kr; \
1309 io_text[1] = cpu_to_be32(io[1]); 894 yl ^= ir; \
1310 io_text[2] = cpu_to_be32(io[2]); 895 yr ^= ROR8(il) ^ ir; \
1311 io_text[3] = cpu_to_be32(io[3]); 896 } while(0)
1312
1313 return;
1314}
1315 897
1316static void camellia_decrypt128(const u32 *subkey, __be32 *io_text) 898/* max = 24: 128bit encrypt, max = 32: 256bit encrypt */
899static void camellia_do_encrypt(const u32 *subkey, u32 *io, unsigned max)
1317{ 900{
1318 u32 il,ir,t0,t1; /* temporary valiables */ 901 u32 il,ir,t0,t1; /* temporary variables */
1319 902
1320 u32 io[4]; 903 /* pre whitening but absorb kw2 */
1321 904 io[0] ^= SUBKEY_L(0);
1322 io[0] = be32_to_cpu(io_text[0]); 905 io[1] ^= SUBKEY_R(0);
1323 io[1] = be32_to_cpu(io_text[1]);
1324 io[2] = be32_to_cpu(io_text[2]);
1325 io[3] = be32_to_cpu(io_text[3]);
1326
1327 /* pre whitening but absorb kw2*/
1328 io[0] ^= CAMELLIA_SUBKEY_L(24);
1329 io[1] ^= CAMELLIA_SUBKEY_R(24);
1330 906
1331 /* main iteration */ 907 /* main iteration */
1332 CAMELLIA_ROUNDSM(io[0],io[1], 908#define ROUNDS(i) do { \
1333 CAMELLIA_SUBKEY_L(23),CAMELLIA_SUBKEY_R(23), 909 CAMELLIA_ROUNDSM(io[0],io[1], \
1334 io[2],io[3],il,ir,t0,t1); 910 SUBKEY_L(i + 2),SUBKEY_R(i + 2), \
1335 CAMELLIA_ROUNDSM(io[2],io[3], 911 io[2],io[3],il,ir); \
1336 CAMELLIA_SUBKEY_L(22),CAMELLIA_SUBKEY_R(22), 912 CAMELLIA_ROUNDSM(io[2],io[3], \
1337 io[0],io[1],il,ir,t0,t1); 913 SUBKEY_L(i + 3),SUBKEY_R(i + 3), \
1338 CAMELLIA_ROUNDSM(io[0],io[1], 914 io[0],io[1],il,ir); \
1339 CAMELLIA_SUBKEY_L(21),CAMELLIA_SUBKEY_R(21), 915 CAMELLIA_ROUNDSM(io[0],io[1], \
1340 io[2],io[3],il,ir,t0,t1); 916 SUBKEY_L(i + 4),SUBKEY_R(i + 4), \
1341 CAMELLIA_ROUNDSM(io[2],io[3], 917 io[2],io[3],il,ir); \
1342 CAMELLIA_SUBKEY_L(20),CAMELLIA_SUBKEY_R(20), 918 CAMELLIA_ROUNDSM(io[2],io[3], \
1343 io[0],io[1],il,ir,t0,t1); 919 SUBKEY_L(i + 5),SUBKEY_R(i + 5), \
1344 CAMELLIA_ROUNDSM(io[0],io[1], 920 io[0],io[1],il,ir); \
1345 CAMELLIA_SUBKEY_L(19),CAMELLIA_SUBKEY_R(19), 921 CAMELLIA_ROUNDSM(io[0],io[1], \
1346 io[2],io[3],il,ir,t0,t1); 922 SUBKEY_L(i + 6),SUBKEY_R(i + 6), \
1347 CAMELLIA_ROUNDSM(io[2],io[3], 923 io[2],io[3],il,ir); \
1348 CAMELLIA_SUBKEY_L(18),CAMELLIA_SUBKEY_R(18), 924 CAMELLIA_ROUNDSM(io[2],io[3], \
1349 io[0],io[1],il,ir,t0,t1); 925 SUBKEY_L(i + 7),SUBKEY_R(i + 7), \
1350 926 io[0],io[1],il,ir); \
1351 CAMELLIA_FLS(io[0],io[1],io[2],io[3], 927} while (0)
1352 CAMELLIA_SUBKEY_L(17),CAMELLIA_SUBKEY_R(17), 928#define FLS(i) do { \
1353 CAMELLIA_SUBKEY_L(16),CAMELLIA_SUBKEY_R(16), 929 CAMELLIA_FLS(io[0],io[1],io[2],io[3], \
1354 t0,t1,il,ir); 930 SUBKEY_L(i + 0),SUBKEY_R(i + 0), \
1355 931 SUBKEY_L(i + 1),SUBKEY_R(i + 1), \
1356 CAMELLIA_ROUNDSM(io[0],io[1], 932 t0,t1,il,ir); \
1357 CAMELLIA_SUBKEY_L(15),CAMELLIA_SUBKEY_R(15), 933} while (0)
1358 io[2],io[3],il,ir,t0,t1); 934
1359 CAMELLIA_ROUNDSM(io[2],io[3], 935 ROUNDS(0);
1360 CAMELLIA_SUBKEY_L(14),CAMELLIA_SUBKEY_R(14), 936 FLS(8);
1361 io[0],io[1],il,ir,t0,t1); 937 ROUNDS(8);
1362 CAMELLIA_ROUNDSM(io[0],io[1], 938 FLS(16);
1363 CAMELLIA_SUBKEY_L(13),CAMELLIA_SUBKEY_R(13), 939 ROUNDS(16);
1364 io[2],io[3],il,ir,t0,t1); 940 if (max == 32) {
1365 CAMELLIA_ROUNDSM(io[2],io[3], 941 FLS(24);
1366 CAMELLIA_SUBKEY_L(12),CAMELLIA_SUBKEY_R(12), 942 ROUNDS(24);
1367 io[0],io[1],il,ir,t0,t1); 943 }
1368 CAMELLIA_ROUNDSM(io[0],io[1],
1369 CAMELLIA_SUBKEY_L(11),CAMELLIA_SUBKEY_R(11),
1370 io[2],io[3],il,ir,t0,t1);
1371 CAMELLIA_ROUNDSM(io[2],io[3],
1372 CAMELLIA_SUBKEY_L(10),CAMELLIA_SUBKEY_R(10),
1373 io[0],io[1],il,ir,t0,t1);
1374
1375 CAMELLIA_FLS(io[0],io[1],io[2],io[3],
1376 CAMELLIA_SUBKEY_L(9),CAMELLIA_SUBKEY_R(9),
1377 CAMELLIA_SUBKEY_L(8),CAMELLIA_SUBKEY_R(8),
1378 t0,t1,il,ir);
1379
1380 CAMELLIA_ROUNDSM(io[0],io[1],
1381 CAMELLIA_SUBKEY_L(7),CAMELLIA_SUBKEY_R(7),
1382 io[2],io[3],il,ir,t0,t1);
1383 CAMELLIA_ROUNDSM(io[2],io[3],
1384 CAMELLIA_SUBKEY_L(6),CAMELLIA_SUBKEY_R(6),
1385 io[0],io[1],il,ir,t0,t1);
1386 CAMELLIA_ROUNDSM(io[0],io[1],
1387 CAMELLIA_SUBKEY_L(5),CAMELLIA_SUBKEY_R(5),
1388 io[2],io[3],il,ir,t0,t1);
1389 CAMELLIA_ROUNDSM(io[2],io[3],
1390 CAMELLIA_SUBKEY_L(4),CAMELLIA_SUBKEY_R(4),
1391 io[0],io[1],il,ir,t0,t1);
1392 CAMELLIA_ROUNDSM(io[0],io[1],
1393 CAMELLIA_SUBKEY_L(3),CAMELLIA_SUBKEY_R(3),
1394 io[2],io[3],il,ir,t0,t1);
1395 CAMELLIA_ROUNDSM(io[2],io[3],
1396 CAMELLIA_SUBKEY_L(2),CAMELLIA_SUBKEY_R(2),
1397 io[0],io[1],il,ir,t0,t1);
1398
1399 /* post whitening but kw4 */
1400 io[2] ^= CAMELLIA_SUBKEY_L(0);
1401 io[3] ^= CAMELLIA_SUBKEY_R(0);
1402
1403 t0 = io[0];
1404 t1 = io[1];
1405 io[0] = io[2];
1406 io[1] = io[3];
1407 io[2] = t0;
1408 io[3] = t1;
1409
1410 io_text[0] = cpu_to_be32(io[0]);
1411 io_text[1] = cpu_to_be32(io[1]);
1412 io_text[2] = cpu_to_be32(io[2]);
1413 io_text[3] = cpu_to_be32(io[3]);
1414
1415 return;
1416}
1417
1418
1419/**
1420 * stuff for 192 and 256bit encryption/decryption
1421 */
1422static void camellia_encrypt256(const u32 *subkey, __be32 *io_text)
1423{
1424 u32 il,ir,t0,t1; /* temporary valiables */
1425
1426 u32 io[4];
1427
1428 io[0] = be32_to_cpu(io_text[0]);
1429 io[1] = be32_to_cpu(io_text[1]);
1430 io[2] = be32_to_cpu(io_text[2]);
1431 io[3] = be32_to_cpu(io_text[3]);
1432 944
1433 /* pre whitening but absorb kw2*/ 945#undef ROUNDS
1434 io[0] ^= CAMELLIA_SUBKEY_L(0); 946#undef FLS
1435 io[1] ^= CAMELLIA_SUBKEY_R(0);
1436
1437 /* main iteration */
1438 CAMELLIA_ROUNDSM(io[0],io[1],
1439 CAMELLIA_SUBKEY_L(2),CAMELLIA_SUBKEY_R(2),
1440 io[2],io[3],il,ir,t0,t1);
1441 CAMELLIA_ROUNDSM(io[2],io[3],
1442 CAMELLIA_SUBKEY_L(3),CAMELLIA_SUBKEY_R(3),
1443 io[0],io[1],il,ir,t0,t1);
1444 CAMELLIA_ROUNDSM(io[0],io[1],
1445 CAMELLIA_SUBKEY_L(4),CAMELLIA_SUBKEY_R(4),
1446 io[2],io[3],il,ir,t0,t1);
1447 CAMELLIA_ROUNDSM(io[2],io[3],
1448 CAMELLIA_SUBKEY_L(5),CAMELLIA_SUBKEY_R(5),
1449 io[0],io[1],il,ir,t0,t1);
1450 CAMELLIA_ROUNDSM(io[0],io[1],
1451 CAMELLIA_SUBKEY_L(6),CAMELLIA_SUBKEY_R(6),
1452 io[2],io[3],il,ir,t0,t1);
1453 CAMELLIA_ROUNDSM(io[2],io[3],
1454 CAMELLIA_SUBKEY_L(7),CAMELLIA_SUBKEY_R(7),
1455 io[0],io[1],il,ir,t0,t1);
1456
1457 CAMELLIA_FLS(io[0],io[1],io[2],io[3],
1458 CAMELLIA_SUBKEY_L(8),CAMELLIA_SUBKEY_R(8),
1459 CAMELLIA_SUBKEY_L(9),CAMELLIA_SUBKEY_R(9),
1460 t0,t1,il,ir);
1461
1462 CAMELLIA_ROUNDSM(io[0],io[1],
1463 CAMELLIA_SUBKEY_L(10),CAMELLIA_SUBKEY_R(10),
1464 io[2],io[3],il,ir,t0,t1);
1465 CAMELLIA_ROUNDSM(io[2],io[3],
1466 CAMELLIA_SUBKEY_L(11),CAMELLIA_SUBKEY_R(11),
1467 io[0],io[1],il,ir,t0,t1);
1468 CAMELLIA_ROUNDSM(io[0],io[1],
1469 CAMELLIA_SUBKEY_L(12),CAMELLIA_SUBKEY_R(12),
1470 io[2],io[3],il,ir,t0,t1);
1471 CAMELLIA_ROUNDSM(io[2],io[3],
1472 CAMELLIA_SUBKEY_L(13),CAMELLIA_SUBKEY_R(13),
1473 io[0],io[1],il,ir,t0,t1);
1474 CAMELLIA_ROUNDSM(io[0],io[1],
1475 CAMELLIA_SUBKEY_L(14),CAMELLIA_SUBKEY_R(14),
1476 io[2],io[3],il,ir,t0,t1);
1477 CAMELLIA_ROUNDSM(io[2],io[3],
1478 CAMELLIA_SUBKEY_L(15),CAMELLIA_SUBKEY_R(15),
1479 io[0],io[1],il,ir,t0,t1);
1480
1481 CAMELLIA_FLS(io[0],io[1],io[2],io[3],
1482 CAMELLIA_SUBKEY_L(16),CAMELLIA_SUBKEY_R(16),
1483 CAMELLIA_SUBKEY_L(17),CAMELLIA_SUBKEY_R(17),
1484 t0,t1,il,ir);
1485
1486 CAMELLIA_ROUNDSM(io[0],io[1],
1487 CAMELLIA_SUBKEY_L(18),CAMELLIA_SUBKEY_R(18),
1488 io[2],io[3],il,ir,t0,t1);
1489 CAMELLIA_ROUNDSM(io[2],io[3],
1490 CAMELLIA_SUBKEY_L(19),CAMELLIA_SUBKEY_R(19),
1491 io[0],io[1],il,ir,t0,t1);
1492 CAMELLIA_ROUNDSM(io[0],io[1],
1493 CAMELLIA_SUBKEY_L(20),CAMELLIA_SUBKEY_R(20),
1494 io[2],io[3],il,ir,t0,t1);
1495 CAMELLIA_ROUNDSM(io[2],io[3],
1496 CAMELLIA_SUBKEY_L(21),CAMELLIA_SUBKEY_R(21),
1497 io[0],io[1],il,ir,t0,t1);
1498 CAMELLIA_ROUNDSM(io[0],io[1],
1499 CAMELLIA_SUBKEY_L(22),CAMELLIA_SUBKEY_R(22),
1500 io[2],io[3],il,ir,t0,t1);
1501 CAMELLIA_ROUNDSM(io[2],io[3],
1502 CAMELLIA_SUBKEY_L(23),CAMELLIA_SUBKEY_R(23),
1503 io[0],io[1],il,ir,t0,t1);
1504
1505 CAMELLIA_FLS(io[0],io[1],io[2],io[3],
1506 CAMELLIA_SUBKEY_L(24),CAMELLIA_SUBKEY_R(24),
1507 CAMELLIA_SUBKEY_L(25),CAMELLIA_SUBKEY_R(25),
1508 t0,t1,il,ir);
1509
1510 CAMELLIA_ROUNDSM(io[0],io[1],
1511 CAMELLIA_SUBKEY_L(26),CAMELLIA_SUBKEY_R(26),
1512 io[2],io[3],il,ir,t0,t1);
1513 CAMELLIA_ROUNDSM(io[2],io[3],
1514 CAMELLIA_SUBKEY_L(27),CAMELLIA_SUBKEY_R(27),
1515 io[0],io[1],il,ir,t0,t1);
1516 CAMELLIA_ROUNDSM(io[0],io[1],
1517 CAMELLIA_SUBKEY_L(28),CAMELLIA_SUBKEY_R(28),
1518 io[2],io[3],il,ir,t0,t1);
1519 CAMELLIA_ROUNDSM(io[2],io[3],
1520 CAMELLIA_SUBKEY_L(29),CAMELLIA_SUBKEY_R(29),
1521 io[0],io[1],il,ir,t0,t1);
1522 CAMELLIA_ROUNDSM(io[0],io[1],
1523 CAMELLIA_SUBKEY_L(30),CAMELLIA_SUBKEY_R(30),
1524 io[2],io[3],il,ir,t0,t1);
1525 CAMELLIA_ROUNDSM(io[2],io[3],
1526 CAMELLIA_SUBKEY_L(31),CAMELLIA_SUBKEY_R(31),
1527 io[0],io[1],il,ir,t0,t1);
1528 947
1529 /* post whitening but kw4 */ 948 /* post whitening but kw4 */
1530 io[2] ^= CAMELLIA_SUBKEY_L(32); 949 io[2] ^= SUBKEY_L(max);
1531 io[3] ^= CAMELLIA_SUBKEY_R(32); 950 io[3] ^= SUBKEY_R(max);
1532 951 /* NB: io[0],[1] should be swapped with [2],[3] by caller! */
1533 t0 = io[0];
1534 t1 = io[1];
1535 io[0] = io[2];
1536 io[1] = io[3];
1537 io[2] = t0;
1538 io[3] = t1;
1539
1540 io_text[0] = cpu_to_be32(io[0]);
1541 io_text[1] = cpu_to_be32(io[1]);
1542 io_text[2] = cpu_to_be32(io[2]);
1543 io_text[3] = cpu_to_be32(io[3]);
1544
1545 return;
1546} 952}
1547 953
1548 954static void camellia_do_decrypt(const u32 *subkey, u32 *io, unsigned i)
1549static void camellia_decrypt256(const u32 *subkey, __be32 *io_text)
1550{ 955{
1551 u32 il,ir,t0,t1; /* temporary valiables */ 956 u32 il,ir,t0,t1; /* temporary variables */
1552 957
1553 u32 io[4]; 958 /* pre whitening but absorb kw2 */
1554 959 io[0] ^= SUBKEY_L(i);
1555 io[0] = be32_to_cpu(io_text[0]); 960 io[1] ^= SUBKEY_R(i);
1556 io[1] = be32_to_cpu(io_text[1]);
1557 io[2] = be32_to_cpu(io_text[2]);
1558 io[3] = be32_to_cpu(io_text[3]);
1559
1560 /* pre whitening but absorb kw2*/
1561 io[0] ^= CAMELLIA_SUBKEY_L(32);
1562 io[1] ^= CAMELLIA_SUBKEY_R(32);
1563 961
1564 /* main iteration */ 962 /* main iteration */
1565 CAMELLIA_ROUNDSM(io[0],io[1], 963#define ROUNDS(i) do { \
1566 CAMELLIA_SUBKEY_L(31),CAMELLIA_SUBKEY_R(31), 964 CAMELLIA_ROUNDSM(io[0],io[1], \
1567 io[2],io[3],il,ir,t0,t1); 965 SUBKEY_L(i + 7),SUBKEY_R(i + 7), \
1568 CAMELLIA_ROUNDSM(io[2],io[3], 966 io[2],io[3],il,ir); \
1569 CAMELLIA_SUBKEY_L(30),CAMELLIA_SUBKEY_R(30), 967 CAMELLIA_ROUNDSM(io[2],io[3], \
1570 io[0],io[1],il,ir,t0,t1); 968 SUBKEY_L(i + 6),SUBKEY_R(i + 6), \
1571 CAMELLIA_ROUNDSM(io[0],io[1], 969 io[0],io[1],il,ir); \
1572 CAMELLIA_SUBKEY_L(29),CAMELLIA_SUBKEY_R(29), 970 CAMELLIA_ROUNDSM(io[0],io[1], \
1573 io[2],io[3],il,ir,t0,t1); 971 SUBKEY_L(i + 5),SUBKEY_R(i + 5), \
1574 CAMELLIA_ROUNDSM(io[2],io[3], 972 io[2],io[3],il,ir); \
1575 CAMELLIA_SUBKEY_L(28),CAMELLIA_SUBKEY_R(28), 973 CAMELLIA_ROUNDSM(io[2],io[3], \
1576 io[0],io[1],il,ir,t0,t1); 974 SUBKEY_L(i + 4),SUBKEY_R(i + 4), \
1577 CAMELLIA_ROUNDSM(io[0],io[1], 975 io[0],io[1],il,ir); \
1578 CAMELLIA_SUBKEY_L(27),CAMELLIA_SUBKEY_R(27), 976 CAMELLIA_ROUNDSM(io[0],io[1], \
1579 io[2],io[3],il,ir,t0,t1); 977 SUBKEY_L(i + 3),SUBKEY_R(i + 3), \
1580 CAMELLIA_ROUNDSM(io[2],io[3], 978 io[2],io[3],il,ir); \
1581 CAMELLIA_SUBKEY_L(26),CAMELLIA_SUBKEY_R(26), 979 CAMELLIA_ROUNDSM(io[2],io[3], \
1582 io[0],io[1],il,ir,t0,t1); 980 SUBKEY_L(i + 2),SUBKEY_R(i + 2), \
1583 981 io[0],io[1],il,ir); \
1584 CAMELLIA_FLS(io[0],io[1],io[2],io[3], 982} while (0)
1585 CAMELLIA_SUBKEY_L(25),CAMELLIA_SUBKEY_R(25), 983#define FLS(i) do { \
1586 CAMELLIA_SUBKEY_L(24),CAMELLIA_SUBKEY_R(24), 984 CAMELLIA_FLS(io[0],io[1],io[2],io[3], \
1587 t0,t1,il,ir); 985 SUBKEY_L(i + 1),SUBKEY_R(i + 1), \
1588 986 SUBKEY_L(i + 0),SUBKEY_R(i + 0), \
1589 CAMELLIA_ROUNDSM(io[0],io[1], 987 t0,t1,il,ir); \
1590 CAMELLIA_SUBKEY_L(23),CAMELLIA_SUBKEY_R(23), 988} while (0)
1591 io[2],io[3],il,ir,t0,t1); 989
1592 CAMELLIA_ROUNDSM(io[2],io[3], 990 if (i == 32) {
1593 CAMELLIA_SUBKEY_L(22),CAMELLIA_SUBKEY_R(22), 991 ROUNDS(24);
1594 io[0],io[1],il,ir,t0,t1); 992 FLS(24);
1595 CAMELLIA_ROUNDSM(io[0],io[1], 993 }
1596 CAMELLIA_SUBKEY_L(21),CAMELLIA_SUBKEY_R(21), 994 ROUNDS(16);
1597 io[2],io[3],il,ir,t0,t1); 995 FLS(16);
1598 CAMELLIA_ROUNDSM(io[2],io[3], 996 ROUNDS(8);
1599 CAMELLIA_SUBKEY_L(20),CAMELLIA_SUBKEY_R(20), 997 FLS(8);
1600 io[0],io[1],il,ir,t0,t1); 998 ROUNDS(0);
1601 CAMELLIA_ROUNDSM(io[0],io[1], 999
1602 CAMELLIA_SUBKEY_L(19),CAMELLIA_SUBKEY_R(19), 1000#undef ROUNDS
1603 io[2],io[3],il,ir,t0,t1); 1001#undef FLS
1604 CAMELLIA_ROUNDSM(io[2],io[3],
1605 CAMELLIA_SUBKEY_L(18),CAMELLIA_SUBKEY_R(18),
1606 io[0],io[1],il,ir,t0,t1);
1607
1608 CAMELLIA_FLS(io[0],io[1],io[2],io[3],
1609 CAMELLIA_SUBKEY_L(17),CAMELLIA_SUBKEY_R(17),
1610 CAMELLIA_SUBKEY_L(16),CAMELLIA_SUBKEY_R(16),
1611 t0,t1,il,ir);
1612
1613 CAMELLIA_ROUNDSM(io[0],io[1],
1614 CAMELLIA_SUBKEY_L(15),CAMELLIA_SUBKEY_R(15),
1615 io[2],io[3],il,ir,t0,t1);
1616 CAMELLIA_ROUNDSM(io[2],io[3],
1617 CAMELLIA_SUBKEY_L(14),CAMELLIA_SUBKEY_R(14),
1618 io[0],io[1],il,ir,t0,t1);
1619 CAMELLIA_ROUNDSM(io[0],io[1],
1620 CAMELLIA_SUBKEY_L(13),CAMELLIA_SUBKEY_R(13),
1621 io[2],io[3],il,ir,t0,t1);
1622 CAMELLIA_ROUNDSM(io[2],io[3],
1623 CAMELLIA_SUBKEY_L(12),CAMELLIA_SUBKEY_R(12),
1624 io[0],io[1],il,ir,t0,t1);
1625 CAMELLIA_ROUNDSM(io[0],io[1],
1626 CAMELLIA_SUBKEY_L(11),CAMELLIA_SUBKEY_R(11),
1627 io[2],io[3],il,ir,t0,t1);
1628 CAMELLIA_ROUNDSM(io[2],io[3],
1629 CAMELLIA_SUBKEY_L(10),CAMELLIA_SUBKEY_R(10),
1630 io[0],io[1],il,ir,t0,t1);
1631
1632 CAMELLIA_FLS(io[0],io[1],io[2],io[3],
1633 CAMELLIA_SUBKEY_L(9),CAMELLIA_SUBKEY_R(9),
1634 CAMELLIA_SUBKEY_L(8),CAMELLIA_SUBKEY_R(8),
1635 t0,t1,il,ir);
1636
1637 CAMELLIA_ROUNDSM(io[0],io[1],
1638 CAMELLIA_SUBKEY_L(7),CAMELLIA_SUBKEY_R(7),
1639 io[2],io[3],il,ir,t0,t1);
1640 CAMELLIA_ROUNDSM(io[2],io[3],
1641 CAMELLIA_SUBKEY_L(6),CAMELLIA_SUBKEY_R(6),
1642 io[0],io[1],il,ir,t0,t1);
1643 CAMELLIA_ROUNDSM(io[0],io[1],
1644 CAMELLIA_SUBKEY_L(5),CAMELLIA_SUBKEY_R(5),
1645 io[2],io[3],il,ir,t0,t1);
1646 CAMELLIA_ROUNDSM(io[2],io[3],
1647 CAMELLIA_SUBKEY_L(4),CAMELLIA_SUBKEY_R(4),
1648 io[0],io[1],il,ir,t0,t1);
1649 CAMELLIA_ROUNDSM(io[0],io[1],
1650 CAMELLIA_SUBKEY_L(3),CAMELLIA_SUBKEY_R(3),
1651 io[2],io[3],il,ir,t0,t1);
1652 CAMELLIA_ROUNDSM(io[2],io[3],
1653 CAMELLIA_SUBKEY_L(2),CAMELLIA_SUBKEY_R(2),
1654 io[0],io[1],il,ir,t0,t1);
1655 1002
1656 /* post whitening but kw4 */ 1003 /* post whitening but kw4 */
1657 io[2] ^= CAMELLIA_SUBKEY_L(0); 1004 io[2] ^= SUBKEY_L(0);
1658 io[3] ^= CAMELLIA_SUBKEY_R(0); 1005 io[3] ^= SUBKEY_R(0);
1659 1006 /* NB: 0,1 should be swapped with 2,3 by caller! */
1660 t0 = io[0];
1661 t1 = io[1];
1662 io[0] = io[2];
1663 io[1] = io[3];
1664 io[2] = t0;
1665 io[3] = t1;
1666
1667 io_text[0] = cpu_to_be32(io[0]);
1668 io_text[1] = cpu_to_be32(io[1]);
1669 io_text[2] = cpu_to_be32(io[2]);
1670 io_text[3] = cpu_to_be32(io[3]);
1671
1672 return;
1673} 1007}
1674 1008
1675 1009
1010struct camellia_ctx {
1011 int key_length;
1012 u32 key_table[CAMELLIA_TABLE_BYTE_LEN / sizeof(u32)];
1013};
1014
1676static int 1015static int
1677camellia_set_key(struct crypto_tfm *tfm, const u8 *in_key, 1016camellia_set_key(struct crypto_tfm *tfm, const u8 *in_key,
1678 unsigned int key_len) 1017 unsigned int key_len)
@@ -1688,7 +1027,7 @@ camellia_set_key(struct crypto_tfm *tfm, const u8 *in_key,
1688 1027
1689 cctx->key_length = key_len; 1028 cctx->key_length = key_len;
1690 1029
1691 switch(key_len) { 1030 switch (key_len) {
1692 case 16: 1031 case 16:
1693 camellia_setup128(key, cctx->key_table); 1032 camellia_setup128(key, cctx->key_table);
1694 break; 1033 break;
@@ -1698,68 +1037,59 @@ camellia_set_key(struct crypto_tfm *tfm, const u8 *in_key,
1698 case 32: 1037 case 32:
1699 camellia_setup256(key, cctx->key_table); 1038 camellia_setup256(key, cctx->key_table);
1700 break; 1039 break;
1701 default:
1702 break;
1703 } 1040 }
1704 1041
1705 return 0; 1042 return 0;
1706} 1043}
1707 1044
1708
1709static void camellia_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) 1045static void camellia_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
1710{ 1046{
1711 const struct camellia_ctx *cctx = crypto_tfm_ctx(tfm); 1047 const struct camellia_ctx *cctx = crypto_tfm_ctx(tfm);
1712 const __be32 *src = (const __be32 *)in; 1048 const __be32 *src = (const __be32 *)in;
1713 __be32 *dst = (__be32 *)out; 1049 __be32 *dst = (__be32 *)out;
1714 1050
1715 __be32 tmp[4]; 1051 u32 tmp[4];
1716 1052
1717 memcpy(tmp, src, CAMELLIA_BLOCK_SIZE); 1053 tmp[0] = be32_to_cpu(src[0]);
1054 tmp[1] = be32_to_cpu(src[1]);
1055 tmp[2] = be32_to_cpu(src[2]);
1056 tmp[3] = be32_to_cpu(src[3]);
1718 1057
1719 switch (cctx->key_length) { 1058 camellia_do_encrypt(cctx->key_table, tmp,
1720 case 16: 1059 cctx->key_length == 16 ? 24 : 32 /* for key lengths of 24 and 32 */
1721 camellia_encrypt128(cctx->key_table, tmp); 1060 );
1722 break;
1723 case 24:
1724 /* fall through */
1725 case 32:
1726 camellia_encrypt256(cctx->key_table, tmp);
1727 break;
1728 default:
1729 break;
1730 }
1731 1061
1732 memcpy(dst, tmp, CAMELLIA_BLOCK_SIZE); 1062 /* do_encrypt returns 0,1 swapped with 2,3 */
1063 dst[0] = cpu_to_be32(tmp[2]);
1064 dst[1] = cpu_to_be32(tmp[3]);
1065 dst[2] = cpu_to_be32(tmp[0]);
1066 dst[3] = cpu_to_be32(tmp[1]);
1733} 1067}
1734 1068
1735
1736static void camellia_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) 1069static void camellia_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
1737{ 1070{
1738 const struct camellia_ctx *cctx = crypto_tfm_ctx(tfm); 1071 const struct camellia_ctx *cctx = crypto_tfm_ctx(tfm);
1739 const __be32 *src = (const __be32 *)in; 1072 const __be32 *src = (const __be32 *)in;
1740 __be32 *dst = (__be32 *)out; 1073 __be32 *dst = (__be32 *)out;
1741 1074
1742 __be32 tmp[4]; 1075 u32 tmp[4];
1743 1076
1744 memcpy(tmp, src, CAMELLIA_BLOCK_SIZE); 1077 tmp[0] = be32_to_cpu(src[0]);
1078 tmp[1] = be32_to_cpu(src[1]);
1079 tmp[2] = be32_to_cpu(src[2]);
1080 tmp[3] = be32_to_cpu(src[3]);
1745 1081
1746 switch (cctx->key_length) { 1082 camellia_do_decrypt(cctx->key_table, tmp,
1747 case 16: 1083 cctx->key_length == 16 ? 24 : 32 /* for key lengths of 24 and 32 */
1748 camellia_decrypt128(cctx->key_table, tmp); 1084 );
1749 break;
1750 case 24:
1751 /* fall through */
1752 case 32:
1753 camellia_decrypt256(cctx->key_table, tmp);
1754 break;
1755 default:
1756 break;
1757 }
1758 1085
1759 memcpy(dst, tmp, CAMELLIA_BLOCK_SIZE); 1086 /* do_decrypt returns 0,1 swapped with 2,3 */
1087 dst[0] = cpu_to_be32(tmp[2]);
1088 dst[1] = cpu_to_be32(tmp[3]);
1089 dst[2] = cpu_to_be32(tmp[0]);
1090 dst[3] = cpu_to_be32(tmp[1]);
1760} 1091}
1761 1092
1762
1763static struct crypto_alg camellia_alg = { 1093static struct crypto_alg camellia_alg = {
1764 .cra_name = "camellia", 1094 .cra_name = "camellia",
1765 .cra_driver_name = "camellia-generic", 1095 .cra_driver_name = "camellia-generic",
@@ -1786,16 +1116,13 @@ static int __init camellia_init(void)
1786 return crypto_register_alg(&camellia_alg); 1116 return crypto_register_alg(&camellia_alg);
1787} 1117}
1788 1118
1789
1790static void __exit camellia_fini(void) 1119static void __exit camellia_fini(void)
1791{ 1120{
1792 crypto_unregister_alg(&camellia_alg); 1121 crypto_unregister_alg(&camellia_alg);
1793} 1122}
1794 1123
1795
1796module_init(camellia_init); 1124module_init(camellia_init);
1797module_exit(camellia_fini); 1125module_exit(camellia_fini);
1798 1126
1799
1800MODULE_DESCRIPTION("Camellia Cipher Algorithm"); 1127MODULE_DESCRIPTION("Camellia Cipher Algorithm");
1801MODULE_LICENSE("GPL"); 1128MODULE_LICENSE("GPL");
diff --git a/crypto/cast6.c b/crypto/cast6.c
index 136ab6dfe8c5..5fd9420dc58e 100644
--- a/crypto/cast6.c
+++ b/crypto/cast6.c
@@ -369,7 +369,7 @@ static const u8 Tr[4][8] = {
369}; 369};
370 370
371/* forward octave */ 371/* forward octave */
372static inline void W(u32 *key, unsigned int i) { 372static void W(u32 *key, unsigned int i) {
373 u32 I; 373 u32 I;
374 key[6] ^= F1(key[7], Tr[i % 4][0], Tm[i][0]); 374 key[6] ^= F1(key[7], Tr[i % 4][0], Tm[i][0]);
375 key[5] ^= F2(key[6], Tr[i % 4][1], Tm[i][1]); 375 key[5] ^= F2(key[6], Tr[i % 4][1], Tm[i][1]);
@@ -428,7 +428,7 @@ static int cast6_setkey(struct crypto_tfm *tfm, const u8 *in_key,
428} 428}
429 429
430/*forward quad round*/ 430/*forward quad round*/
431static inline void Q (u32 * block, u8 * Kr, u32 * Km) { 431static void Q (u32 * block, u8 * Kr, u32 * Km) {
432 u32 I; 432 u32 I;
433 block[2] ^= F1(block[3], Kr[0], Km[0]); 433 block[2] ^= F1(block[3], Kr[0], Km[0]);
434 block[1] ^= F2(block[2], Kr[1], Km[1]); 434 block[1] ^= F2(block[2], Kr[1], Km[1]);
@@ -437,7 +437,7 @@ static inline void Q (u32 * block, u8 * Kr, u32 * Km) {
437} 437}
438 438
439/*reverse quad round*/ 439/*reverse quad round*/
440static inline void QBAR (u32 * block, u8 * Kr, u32 * Km) { 440static void QBAR (u32 * block, u8 * Kr, u32 * Km) {
441 u32 I; 441 u32 I;
442 block[3] ^= F1(block[0], Kr[3], Km[3]); 442 block[3] ^= F1(block[0], Kr[3], Km[3]);
443 block[0] ^= F3(block[1], Kr[2], Km[2]); 443 block[0] ^= F3(block[1], Kr[2], Km[2]);
diff --git a/crypto/cbc.c b/crypto/cbc.c
index 1f2649e13b42..6affff882cf8 100644
--- a/crypto/cbc.c
+++ b/crypto/cbc.c
@@ -14,13 +14,13 @@
14#include <linux/err.h> 14#include <linux/err.h>
15#include <linux/init.h> 15#include <linux/init.h>
16#include <linux/kernel.h> 16#include <linux/kernel.h>
17#include <linux/log2.h>
17#include <linux/module.h> 18#include <linux/module.h>
18#include <linux/scatterlist.h> 19#include <linux/scatterlist.h>
19#include <linux/slab.h> 20#include <linux/slab.h>
20 21
21struct crypto_cbc_ctx { 22struct crypto_cbc_ctx {
22 struct crypto_cipher *child; 23 struct crypto_cipher *child;
23 void (*xor)(u8 *dst, const u8 *src, unsigned int bs);
24}; 24};
25 25
26static int crypto_cbc_setkey(struct crypto_tfm *parent, const u8 *key, 26static int crypto_cbc_setkey(struct crypto_tfm *parent, const u8 *key,
@@ -41,9 +41,7 @@ static int crypto_cbc_setkey(struct crypto_tfm *parent, const u8 *key,
41 41
42static int crypto_cbc_encrypt_segment(struct blkcipher_desc *desc, 42static int crypto_cbc_encrypt_segment(struct blkcipher_desc *desc,
43 struct blkcipher_walk *walk, 43 struct blkcipher_walk *walk,
44 struct crypto_cipher *tfm, 44 struct crypto_cipher *tfm)
45 void (*xor)(u8 *, const u8 *,
46 unsigned int))
47{ 45{
48 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = 46 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
49 crypto_cipher_alg(tfm)->cia_encrypt; 47 crypto_cipher_alg(tfm)->cia_encrypt;
@@ -54,7 +52,7 @@ static int crypto_cbc_encrypt_segment(struct blkcipher_desc *desc,
54 u8 *iv = walk->iv; 52 u8 *iv = walk->iv;
55 53
56 do { 54 do {
57 xor(iv, src, bsize); 55 crypto_xor(iv, src, bsize);
58 fn(crypto_cipher_tfm(tfm), dst, iv); 56 fn(crypto_cipher_tfm(tfm), dst, iv);
59 memcpy(iv, dst, bsize); 57 memcpy(iv, dst, bsize);
60 58
@@ -67,9 +65,7 @@ static int crypto_cbc_encrypt_segment(struct blkcipher_desc *desc,
67 65
68static int crypto_cbc_encrypt_inplace(struct blkcipher_desc *desc, 66static int crypto_cbc_encrypt_inplace(struct blkcipher_desc *desc,
69 struct blkcipher_walk *walk, 67 struct blkcipher_walk *walk,
70 struct crypto_cipher *tfm, 68 struct crypto_cipher *tfm)
71 void (*xor)(u8 *, const u8 *,
72 unsigned int))
73{ 69{
74 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = 70 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
75 crypto_cipher_alg(tfm)->cia_encrypt; 71 crypto_cipher_alg(tfm)->cia_encrypt;
@@ -79,7 +75,7 @@ static int crypto_cbc_encrypt_inplace(struct blkcipher_desc *desc,
79 u8 *iv = walk->iv; 75 u8 *iv = walk->iv;
80 76
81 do { 77 do {
82 xor(src, iv, bsize); 78 crypto_xor(src, iv, bsize);
83 fn(crypto_cipher_tfm(tfm), src, src); 79 fn(crypto_cipher_tfm(tfm), src, src);
84 iv = src; 80 iv = src;
85 81
@@ -99,7 +95,6 @@ static int crypto_cbc_encrypt(struct blkcipher_desc *desc,
99 struct crypto_blkcipher *tfm = desc->tfm; 95 struct crypto_blkcipher *tfm = desc->tfm;
100 struct crypto_cbc_ctx *ctx = crypto_blkcipher_ctx(tfm); 96 struct crypto_cbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
101 struct crypto_cipher *child = ctx->child; 97 struct crypto_cipher *child = ctx->child;
102 void (*xor)(u8 *, const u8 *, unsigned int bs) = ctx->xor;
103 int err; 98 int err;
104 99
105 blkcipher_walk_init(&walk, dst, src, nbytes); 100 blkcipher_walk_init(&walk, dst, src, nbytes);
@@ -107,11 +102,9 @@ static int crypto_cbc_encrypt(struct blkcipher_desc *desc,
107 102
108 while ((nbytes = walk.nbytes)) { 103 while ((nbytes = walk.nbytes)) {
109 if (walk.src.virt.addr == walk.dst.virt.addr) 104 if (walk.src.virt.addr == walk.dst.virt.addr)
110 nbytes = crypto_cbc_encrypt_inplace(desc, &walk, child, 105 nbytes = crypto_cbc_encrypt_inplace(desc, &walk, child);
111 xor);
112 else 106 else
113 nbytes = crypto_cbc_encrypt_segment(desc, &walk, child, 107 nbytes = crypto_cbc_encrypt_segment(desc, &walk, child);
114 xor);
115 err = blkcipher_walk_done(desc, &walk, nbytes); 108 err = blkcipher_walk_done(desc, &walk, nbytes);
116 } 109 }
117 110
@@ -120,9 +113,7 @@ static int crypto_cbc_encrypt(struct blkcipher_desc *desc,
120 113
121static int crypto_cbc_decrypt_segment(struct blkcipher_desc *desc, 114static int crypto_cbc_decrypt_segment(struct blkcipher_desc *desc,
122 struct blkcipher_walk *walk, 115 struct blkcipher_walk *walk,
123 struct crypto_cipher *tfm, 116 struct crypto_cipher *tfm)
124 void (*xor)(u8 *, const u8 *,
125 unsigned int))
126{ 117{
127 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = 118 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
128 crypto_cipher_alg(tfm)->cia_decrypt; 119 crypto_cipher_alg(tfm)->cia_decrypt;
@@ -134,7 +125,7 @@ static int crypto_cbc_decrypt_segment(struct blkcipher_desc *desc,
134 125
135 do { 126 do {
136 fn(crypto_cipher_tfm(tfm), dst, src); 127 fn(crypto_cipher_tfm(tfm), dst, src);
137 xor(dst, iv, bsize); 128 crypto_xor(dst, iv, bsize);
138 iv = src; 129 iv = src;
139 130
140 src += bsize; 131 src += bsize;
@@ -148,34 +139,29 @@ static int crypto_cbc_decrypt_segment(struct blkcipher_desc *desc,
148 139
149static int crypto_cbc_decrypt_inplace(struct blkcipher_desc *desc, 140static int crypto_cbc_decrypt_inplace(struct blkcipher_desc *desc,
150 struct blkcipher_walk *walk, 141 struct blkcipher_walk *walk,
151 struct crypto_cipher *tfm, 142 struct crypto_cipher *tfm)
152 void (*xor)(u8 *, const u8 *,
153 unsigned int))
154{ 143{
155 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = 144 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
156 crypto_cipher_alg(tfm)->cia_decrypt; 145 crypto_cipher_alg(tfm)->cia_decrypt;
157 int bsize = crypto_cipher_blocksize(tfm); 146 int bsize = crypto_cipher_blocksize(tfm);
158 unsigned long alignmask = crypto_cipher_alignmask(tfm);
159 unsigned int nbytes = walk->nbytes; 147 unsigned int nbytes = walk->nbytes;
160 u8 *src = walk->src.virt.addr; 148 u8 *src = walk->src.virt.addr;
161 u8 stack[bsize + alignmask]; 149 u8 last_iv[bsize];
162 u8 *first_iv = (u8 *)ALIGN((unsigned long)stack, alignmask + 1);
163
164 memcpy(first_iv, walk->iv, bsize);
165 150
166 /* Start of the last block. */ 151 /* Start of the last block. */
167 src += nbytes - nbytes % bsize - bsize; 152 src += nbytes - (nbytes & (bsize - 1)) - bsize;
168 memcpy(walk->iv, src, bsize); 153 memcpy(last_iv, src, bsize);
169 154
170 for (;;) { 155 for (;;) {
171 fn(crypto_cipher_tfm(tfm), src, src); 156 fn(crypto_cipher_tfm(tfm), src, src);
172 if ((nbytes -= bsize) < bsize) 157 if ((nbytes -= bsize) < bsize)
173 break; 158 break;
174 xor(src, src - bsize, bsize); 159 crypto_xor(src, src - bsize, bsize);
175 src -= bsize; 160 src -= bsize;
176 } 161 }
177 162
178 xor(src, first_iv, bsize); 163 crypto_xor(src, walk->iv, bsize);
164 memcpy(walk->iv, last_iv, bsize);
179 165
180 return nbytes; 166 return nbytes;
181} 167}
@@ -188,7 +174,6 @@ static int crypto_cbc_decrypt(struct blkcipher_desc *desc,
188 struct crypto_blkcipher *tfm = desc->tfm; 174 struct crypto_blkcipher *tfm = desc->tfm;
189 struct crypto_cbc_ctx *ctx = crypto_blkcipher_ctx(tfm); 175 struct crypto_cbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
190 struct crypto_cipher *child = ctx->child; 176 struct crypto_cipher *child = ctx->child;
191 void (*xor)(u8 *, const u8 *, unsigned int bs) = ctx->xor;
192 int err; 177 int err;
193 178
194 blkcipher_walk_init(&walk, dst, src, nbytes); 179 blkcipher_walk_init(&walk, dst, src, nbytes);
@@ -196,48 +181,15 @@ static int crypto_cbc_decrypt(struct blkcipher_desc *desc,
196 181
197 while ((nbytes = walk.nbytes)) { 182 while ((nbytes = walk.nbytes)) {
198 if (walk.src.virt.addr == walk.dst.virt.addr) 183 if (walk.src.virt.addr == walk.dst.virt.addr)
199 nbytes = crypto_cbc_decrypt_inplace(desc, &walk, child, 184 nbytes = crypto_cbc_decrypt_inplace(desc, &walk, child);
200 xor);
201 else 185 else
202 nbytes = crypto_cbc_decrypt_segment(desc, &walk, child, 186 nbytes = crypto_cbc_decrypt_segment(desc, &walk, child);
203 xor);
204 err = blkcipher_walk_done(desc, &walk, nbytes); 187 err = blkcipher_walk_done(desc, &walk, nbytes);
205 } 188 }
206 189
207 return err; 190 return err;
208} 191}
209 192
210static void xor_byte(u8 *a, const u8 *b, unsigned int bs)
211{
212 do {
213 *a++ ^= *b++;
214 } while (--bs);
215}
216
217static void xor_quad(u8 *dst, const u8 *src, unsigned int bs)
218{
219 u32 *a = (u32 *)dst;
220 u32 *b = (u32 *)src;
221
222 do {
223 *a++ ^= *b++;
224 } while ((bs -= 4));
225}
226
227static void xor_64(u8 *a, const u8 *b, unsigned int bs)
228{
229 ((u32 *)a)[0] ^= ((u32 *)b)[0];
230 ((u32 *)a)[1] ^= ((u32 *)b)[1];
231}
232
233static void xor_128(u8 *a, const u8 *b, unsigned int bs)
234{
235 ((u32 *)a)[0] ^= ((u32 *)b)[0];
236 ((u32 *)a)[1] ^= ((u32 *)b)[1];
237 ((u32 *)a)[2] ^= ((u32 *)b)[2];
238 ((u32 *)a)[3] ^= ((u32 *)b)[3];
239}
240
241static int crypto_cbc_init_tfm(struct crypto_tfm *tfm) 193static int crypto_cbc_init_tfm(struct crypto_tfm *tfm)
242{ 194{
243 struct crypto_instance *inst = (void *)tfm->__crt_alg; 195 struct crypto_instance *inst = (void *)tfm->__crt_alg;
@@ -245,22 +197,6 @@ static int crypto_cbc_init_tfm(struct crypto_tfm *tfm)
245 struct crypto_cbc_ctx *ctx = crypto_tfm_ctx(tfm); 197 struct crypto_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
246 struct crypto_cipher *cipher; 198 struct crypto_cipher *cipher;
247 199
248 switch (crypto_tfm_alg_blocksize(tfm)) {
249 case 8:
250 ctx->xor = xor_64;
251 break;
252
253 case 16:
254 ctx->xor = xor_128;
255 break;
256
257 default:
258 if (crypto_tfm_alg_blocksize(tfm) % 4)
259 ctx->xor = xor_byte;
260 else
261 ctx->xor = xor_quad;
262 }
263
264 cipher = crypto_spawn_cipher(spawn); 200 cipher = crypto_spawn_cipher(spawn);
265 if (IS_ERR(cipher)) 201 if (IS_ERR(cipher))
266 return PTR_ERR(cipher); 202 return PTR_ERR(cipher);
@@ -290,6 +226,10 @@ static struct crypto_instance *crypto_cbc_alloc(struct rtattr **tb)
290 if (IS_ERR(alg)) 226 if (IS_ERR(alg))
291 return ERR_PTR(PTR_ERR(alg)); 227 return ERR_PTR(PTR_ERR(alg));
292 228
229 inst = ERR_PTR(-EINVAL);
230 if (!is_power_of_2(alg->cra_blocksize))
231 goto out_put_alg;
232
293 inst = crypto_alloc_instance("cbc", alg); 233 inst = crypto_alloc_instance("cbc", alg);
294 if (IS_ERR(inst)) 234 if (IS_ERR(inst))
295 goto out_put_alg; 235 goto out_put_alg;
@@ -300,8 +240,9 @@ static struct crypto_instance *crypto_cbc_alloc(struct rtattr **tb)
300 inst->alg.cra_alignmask = alg->cra_alignmask; 240 inst->alg.cra_alignmask = alg->cra_alignmask;
301 inst->alg.cra_type = &crypto_blkcipher_type; 241 inst->alg.cra_type = &crypto_blkcipher_type;
302 242
303 if (!(alg->cra_blocksize % 4)) 243 /* We access the data as u32s when xoring. */
304 inst->alg.cra_alignmask |= 3; 244 inst->alg.cra_alignmask |= __alignof__(u32) - 1;
245
305 inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize; 246 inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
306 inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize; 247 inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
307 inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize; 248 inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;
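The cbc changes above drop the per-blocksize xor_64/xor_128 helpers in favour of the shared crypto_xor() and widen cra_alignmask so the data may be accessed as u32s. As a rough sketch (not the actual crypto_xor implementation), the word-wise XOR that the alignment guarantee makes safe looks like this, assuming len is a multiple of sizeof(u32) and both pointers are u32-aligned:

static void xor_words(u8 *dst, const u8 *src, unsigned int len)
{
        u32 *d = (u32 *)dst;
        const u32 *s = (const u32 *)src;

        while (len >= sizeof(u32)) {
                *d++ ^= *s++;
                len -= sizeof(u32);
        }
}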
diff --git a/crypto/ccm.c b/crypto/ccm.c
new file mode 100644
index 000000000000..7cf7e5a6b781
--- /dev/null
+++ b/crypto/ccm.c
@@ -0,0 +1,889 @@
1/*
2 * CCM: Counter with CBC-MAC
3 *
4 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License as published by the Free
8 * Software Foundation; either version 2 of the License, or (at your option)
9 * any later version.
10 *
11 */
12
13#include <crypto/internal/aead.h>
14#include <crypto/internal/skcipher.h>
15#include <crypto/scatterwalk.h>
16#include <linux/err.h>
17#include <linux/init.h>
18#include <linux/kernel.h>
19#include <linux/module.h>
20#include <linux/slab.h>
21
22#include "internal.h"
23
24struct ccm_instance_ctx {
25 struct crypto_skcipher_spawn ctr;
26 struct crypto_spawn cipher;
27};
28
29struct crypto_ccm_ctx {
30 struct crypto_cipher *cipher;
31 struct crypto_ablkcipher *ctr;
32};
33
34struct crypto_rfc4309_ctx {
35 struct crypto_aead *child;
36 u8 nonce[3];
37};
38
39struct crypto_ccm_req_priv_ctx {
40 u8 odata[16];
41 u8 idata[16];
42 u8 auth_tag[16];
43 u32 ilen;
44 u32 flags;
45 struct scatterlist src[2];
46 struct scatterlist dst[2];
47 struct ablkcipher_request abreq;
48};
49
50static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx(
51 struct aead_request *req)
52{
53 unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));
54
55 return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
56}
57
58static int set_msg_len(u8 *block, unsigned int msglen, int csize)
59{
60 __be32 data;
61
62 memset(block, 0, csize);
63 block += csize;
64
65 if (csize >= 4)
66 csize = 4;
67 else if (msglen > (1 << (8 * csize)))
68 return -EOVERFLOW;
69
70 data = cpu_to_be32(msglen);
71 memcpy(block - csize, (u8 *)&data + 4 - csize, csize);
72
73 return 0;
74}
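/*
 * Worked example for set_msg_len() (values assumed for illustration):
 *   set_msg_len(b0 + 14, 4096, 2)
 * zeroes b0[14..15] and then copies the low two bytes of
 * cpu_to_be32(4096) = 00 00 10 00, leaving b0[14] = 0x10, b0[15] = 0x00.
 * A msglen too large for csize bytes makes the function return -EOVERFLOW.
 */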
75
76static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
77 unsigned int keylen)
78{
79 struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
80 struct crypto_ablkcipher *ctr = ctx->ctr;
81 struct crypto_cipher *tfm = ctx->cipher;
82 int err = 0;
83
84 crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
85 crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
86 CRYPTO_TFM_REQ_MASK);
87 err = crypto_ablkcipher_setkey(ctr, key, keylen);
88 crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
89 CRYPTO_TFM_RES_MASK);
90 if (err)
91 goto out;
92
93 crypto_cipher_clear_flags(tfm, CRYPTO_TFM_REQ_MASK);
94 crypto_cipher_set_flags(tfm, crypto_aead_get_flags(aead) &
95 CRYPTO_TFM_REQ_MASK);
96 err = crypto_cipher_setkey(tfm, key, keylen);
97 crypto_aead_set_flags(aead, crypto_cipher_get_flags(tfm) &
98 CRYPTO_TFM_RES_MASK);
99
100out:
101 return err;
102}
103
104static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
105 unsigned int authsize)
106{
107 switch (authsize) {
108 case 4:
109 case 6:
110 case 8:
111 case 10:
112 case 12:
113 case 14:
114 case 16:
115 break;
116 default:
117 return -EINVAL;
118 }
119
120 return 0;
121}
122
123static int format_input(u8 *info, struct aead_request *req,
124 unsigned int cryptlen)
125{
126 struct crypto_aead *aead = crypto_aead_reqtfm(req);
127 unsigned int lp = req->iv[0];
128 unsigned int l = lp + 1;
129 unsigned int m;
130
131 m = crypto_aead_authsize(aead);
132
133 memcpy(info, req->iv, 16);
134
135 /* format control info per RFC 3610 and
136 * NIST Special Publication 800-38C
137 */
138 *info |= (8 * ((m - 2) / 2));
139 if (req->assoclen)
140 *info |= 64;
141
142 return set_msg_len(info + 16 - l, cryptlen, l);
143}
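/*
 * Worked example for format_input() (parameters assumed, not from the patch):
 *   authsize m = 8, req->assoclen != 0, req->iv[0] = 3 (so l = 4)
 *   flags octet = 0x03 | 0x40 | 8 * ((8 - 2) / 2) = 0x03 | 0x40 | 0x18 = 0x5b
 * B_0 is then flags || nonce, with set_msg_len() writing cryptlen big-endian
 * into the last l = 4 bytes of the block.
 */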
144
145static int format_adata(u8 *adata, unsigned int a)
146{
147 int len = 0;
148
 149	/* add the length header for the associated data, per
 150	 * RFC 3610 and NIST Special Publication 800-38C
 151	 */
152 if (a < 65280) {
153 *(__be16 *)adata = cpu_to_be16(a);
154 len = 2;
155 } else {
156 *(__be16 *)adata = cpu_to_be16(0xfffe);
157 *(__be32 *)&adata[2] = cpu_to_be32(a);
158 len = 6;
159 }
160
161 return len;
162}
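
/*
 * Worked example: for assoclen = 24 the header is the two bytes
 * 0x00 0x18; for assoclen = 80000 (>= 0xff00 = 65280) it is the marker
 * 0xff 0xfe followed by the big-endian 32-bit value 0x00 0x01 0x38 0x80.
 * The returned length becomes pctx->ilen, so the associated data that
 * compute_mac() processes next is packed into the same block directly
 * after this header, as RFC 3610 requires.
 */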
163
164static void compute_mac(struct crypto_cipher *tfm, u8 *data, int n,
165 struct crypto_ccm_req_priv_ctx *pctx)
166{
167 unsigned int bs = 16;
168 u8 *odata = pctx->odata;
169 u8 *idata = pctx->idata;
170 int datalen, getlen;
171
172 datalen = n;
173
 174	/* the block may already be partially filled; top it up first. */
175 getlen = bs - pctx->ilen;
176 if (datalen >= getlen) {
177 memcpy(idata + pctx->ilen, data, getlen);
178 crypto_xor(odata, idata, bs);
179 crypto_cipher_encrypt_one(tfm, odata, odata);
180 datalen -= getlen;
181 data += getlen;
182 pctx->ilen = 0;
183 }
184
185 /* now encrypt rest of data */
186 while (datalen >= bs) {
187 crypto_xor(odata, data, bs);
188 crypto_cipher_encrypt_one(tfm, odata, odata);
189
190 datalen -= bs;
191 data += bs;
192 }
193
194 /* check and see if there's leftover data that wasn't
195 * enough to fill a block.
196 */
197 if (datalen) {
198 memcpy(idata + pctx->ilen, data, datalen);
199 pctx->ilen += datalen;
200 }
201}
202
203static void get_data_to_compute(struct crypto_cipher *tfm,
204 struct crypto_ccm_req_priv_ctx *pctx,
205 struct scatterlist *sg, unsigned int len)
206{
207 struct scatter_walk walk;
208 u8 *data_src;
209 int n;
210
211 scatterwalk_start(&walk, sg);
212
213 while (len) {
214 n = scatterwalk_clamp(&walk, len);
215 if (!n) {
216 scatterwalk_start(&walk, sg_next(walk.sg));
217 n = scatterwalk_clamp(&walk, len);
218 }
219 data_src = scatterwalk_map(&walk, 0);
220
221 compute_mac(tfm, data_src, n, pctx);
222 len -= n;
223
224 scatterwalk_unmap(data_src, 0);
225 scatterwalk_advance(&walk, n);
226 scatterwalk_done(&walk, 0, len);
227 if (len)
228 crypto_yield(pctx->flags);
229 }
230
 231	/* any leftover data is padded and then encrypted */
232 if (pctx->ilen) {
233 int padlen;
234 u8 *odata = pctx->odata;
235 u8 *idata = pctx->idata;
236
237 padlen = 16 - pctx->ilen;
238 memset(idata + pctx->ilen, 0, padlen);
239 crypto_xor(odata, idata, 16);
240 crypto_cipher_encrypt_one(tfm, odata, odata);
241 pctx->ilen = 0;
242 }
243}
244
245static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
246 unsigned int cryptlen)
247{
248 struct crypto_aead *aead = crypto_aead_reqtfm(req);
249 struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
250 struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
251 struct crypto_cipher *cipher = ctx->cipher;
252 unsigned int assoclen = req->assoclen;
253 u8 *odata = pctx->odata;
254 u8 *idata = pctx->idata;
255 int err;
256
257 /* format control data for input */
258 err = format_input(odata, req, cryptlen);
259 if (err)
260 goto out;
261
 262	/* encrypt the B_0 block to seed the CBC-MAC computation */
263 crypto_cipher_encrypt_one(cipher, odata, odata);
264
 265	/* format the associated data and fold it into the MAC */
266 if (assoclen) {
267 pctx->ilen = format_adata(idata, assoclen);
268 get_data_to_compute(cipher, pctx, req->assoc, req->assoclen);
269 }
270
 271	/* fold the plaintext into the MAC */
272 get_data_to_compute(cipher, pctx, plain, cryptlen);
273
274out:
275 return err;
276}
277
278static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err)
279{
280 struct aead_request *req = areq->data;
281 struct crypto_aead *aead = crypto_aead_reqtfm(req);
282 struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
283 u8 *odata = pctx->odata;
284
285 if (!err)
286 scatterwalk_map_and_copy(odata, req->dst, req->cryptlen,
287 crypto_aead_authsize(aead), 1);
288 aead_request_complete(req, err);
289}
290
291static inline int crypto_ccm_check_iv(const u8 *iv)
292{
293 /* 2 <= L <= 8, so 1 <= L' <= 7. */
294 if (1 > iv[0] || iv[0] > 7)
295 return -EINVAL;
296
297 return 0;
298}
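
/*
 * The caller-supplied 16-byte IV is laid out like the RFC 3610 counter
 * block: byte 0 carries L' = L - 1 (the width of the length field minus
 * one), bytes 1 .. 15 - L carry the nonce, and the last L bytes are the
 * block counter, which the encrypt and decrypt paths below zero before
 * handing the request to the CTR transform.  With L' = 3 (the rfc4309
 * case) that leaves an 11-byte nonce and a 4-byte counter.
 */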
299
300static int crypto_ccm_encrypt(struct aead_request *req)
301{
302 struct crypto_aead *aead = crypto_aead_reqtfm(req);
303 struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
304 struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
305 struct ablkcipher_request *abreq = &pctx->abreq;
306 struct scatterlist *dst;
307 unsigned int cryptlen = req->cryptlen;
308 u8 *odata = pctx->odata;
309 u8 *iv = req->iv;
310 int err;
311
312 err = crypto_ccm_check_iv(iv);
313 if (err)
314 return err;
315
316 pctx->flags = aead_request_flags(req);
317
318 err = crypto_ccm_auth(req, req->src, cryptlen);
319 if (err)
320 return err;
321
 322	/* Note: RFC 3610 and NIST SP 800-38C require the counter portion
 323	 * to be zero when encrypting the auth tag.
 324	 */
325 memset(iv + 15 - iv[0], 0, iv[0] + 1);
326
327 sg_init_table(pctx->src, 2);
328 sg_set_buf(pctx->src, odata, 16);
329 scatterwalk_sg_chain(pctx->src, 2, req->src);
330
331 dst = pctx->src;
332 if (req->src != req->dst) {
333 sg_init_table(pctx->dst, 2);
334 sg_set_buf(pctx->dst, odata, 16);
335 scatterwalk_sg_chain(pctx->dst, 2, req->dst);
336 dst = pctx->dst;
337 }
338
339 ablkcipher_request_set_tfm(abreq, ctx->ctr);
340 ablkcipher_request_set_callback(abreq, pctx->flags,
341 crypto_ccm_encrypt_done, req);
342 ablkcipher_request_set_crypt(abreq, pctx->src, dst, cryptlen + 16, iv);
343 err = crypto_ablkcipher_encrypt(abreq);
344 if (err)
345 return err;
346
347 /* copy authtag to end of dst */
348 scatterwalk_map_and_copy(odata, req->dst, cryptlen,
349 crypto_aead_authsize(aead), 1);
350 return err;
351}
352
353static void crypto_ccm_decrypt_done(struct crypto_async_request *areq,
354 int err)
355{
356 struct aead_request *req = areq->data;
357 struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
358 struct crypto_aead *aead = crypto_aead_reqtfm(req);
359 unsigned int authsize = crypto_aead_authsize(aead);
360 unsigned int cryptlen = req->cryptlen - authsize;
361
362 if (!err) {
363 err = crypto_ccm_auth(req, req->dst, cryptlen);
364 if (!err && memcmp(pctx->auth_tag, pctx->odata, authsize))
365 err = -EBADMSG;
366 }
367 aead_request_complete(req, err);
368}
369
370static int crypto_ccm_decrypt(struct aead_request *req)
371{
372 struct crypto_aead *aead = crypto_aead_reqtfm(req);
373 struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
374 struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
375 struct ablkcipher_request *abreq = &pctx->abreq;
376 struct scatterlist *dst;
377 unsigned int authsize = crypto_aead_authsize(aead);
378 unsigned int cryptlen = req->cryptlen;
379 u8 *authtag = pctx->auth_tag;
380 u8 *odata = pctx->odata;
381 u8 *iv = req->iv;
382 int err;
383
384 if (cryptlen < authsize)
385 return -EINVAL;
386 cryptlen -= authsize;
387
388 err = crypto_ccm_check_iv(iv);
389 if (err)
390 return err;
391
392 pctx->flags = aead_request_flags(req);
393
394 scatterwalk_map_and_copy(authtag, req->src, cryptlen, authsize, 0);
395
396 memset(iv + 15 - iv[0], 0, iv[0] + 1);
397
398 sg_init_table(pctx->src, 2);
399 sg_set_buf(pctx->src, authtag, 16);
400 scatterwalk_sg_chain(pctx->src, 2, req->src);
401
402 dst = pctx->src;
403 if (req->src != req->dst) {
404 sg_init_table(pctx->dst, 2);
405 sg_set_buf(pctx->dst, authtag, 16);
406 scatterwalk_sg_chain(pctx->dst, 2, req->dst);
407 dst = pctx->dst;
408 }
409
410 ablkcipher_request_set_tfm(abreq, ctx->ctr);
411 ablkcipher_request_set_callback(abreq, pctx->flags,
412 crypto_ccm_decrypt_done, req);
413 ablkcipher_request_set_crypt(abreq, pctx->src, dst, cryptlen + 16, iv);
414 err = crypto_ablkcipher_decrypt(abreq);
415 if (err)
416 return err;
417
418 err = crypto_ccm_auth(req, req->dst, cryptlen);
419 if (err)
420 return err;
421
422 /* verify */
423 if (memcmp(authtag, odata, authsize))
424 return -EBADMSG;
425
426 return err;
427}
428
429static int crypto_ccm_init_tfm(struct crypto_tfm *tfm)
430{
431 struct crypto_instance *inst = (void *)tfm->__crt_alg;
432 struct ccm_instance_ctx *ictx = crypto_instance_ctx(inst);
433 struct crypto_ccm_ctx *ctx = crypto_tfm_ctx(tfm);
434 struct crypto_cipher *cipher;
435 struct crypto_ablkcipher *ctr;
436 unsigned long align;
437 int err;
438
439 cipher = crypto_spawn_cipher(&ictx->cipher);
440 if (IS_ERR(cipher))
441 return PTR_ERR(cipher);
442
443 ctr = crypto_spawn_skcipher(&ictx->ctr);
444 err = PTR_ERR(ctr);
445 if (IS_ERR(ctr))
446 goto err_free_cipher;
447
448 ctx->cipher = cipher;
449 ctx->ctr = ctr;
450
451 align = crypto_tfm_alg_alignmask(tfm);
452 align &= ~(crypto_tfm_ctx_alignment() - 1);
453 tfm->crt_aead.reqsize = align +
454 sizeof(struct crypto_ccm_req_priv_ctx) +
455 crypto_ablkcipher_reqsize(ctr);
456
457 return 0;
458
459err_free_cipher:
460 crypto_free_cipher(cipher);
461 return err;
462}
463
464static void crypto_ccm_exit_tfm(struct crypto_tfm *tfm)
465{
466 struct crypto_ccm_ctx *ctx = crypto_tfm_ctx(tfm);
467
468 crypto_free_cipher(ctx->cipher);
469 crypto_free_ablkcipher(ctx->ctr);
470}
471
472static struct crypto_instance *crypto_ccm_alloc_common(struct rtattr **tb,
473 const char *full_name,
474 const char *ctr_name,
475 const char *cipher_name)
476{
477 struct crypto_attr_type *algt;
478 struct crypto_instance *inst;
479 struct crypto_alg *ctr;
480 struct crypto_alg *cipher;
481 struct ccm_instance_ctx *ictx;
482 int err;
483
484 algt = crypto_get_attr_type(tb);
485 err = PTR_ERR(algt);
486 if (IS_ERR(algt))
487 return ERR_PTR(err);
488
489 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
490 return ERR_PTR(-EINVAL);
491
492 cipher = crypto_alg_mod_lookup(cipher_name, CRYPTO_ALG_TYPE_CIPHER,
493 CRYPTO_ALG_TYPE_MASK);
494 err = PTR_ERR(cipher);
495 if (IS_ERR(cipher))
496 return ERR_PTR(err);
497
498 err = -EINVAL;
499 if (cipher->cra_blocksize != 16)
500 goto out_put_cipher;
501
502 inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
503 err = -ENOMEM;
504 if (!inst)
505 goto out_put_cipher;
506
507 ictx = crypto_instance_ctx(inst);
508
509 err = crypto_init_spawn(&ictx->cipher, cipher, inst,
510 CRYPTO_ALG_TYPE_MASK);
511 if (err)
512 goto err_free_inst;
513
514 crypto_set_skcipher_spawn(&ictx->ctr, inst);
515 err = crypto_grab_skcipher(&ictx->ctr, ctr_name, 0,
516 crypto_requires_sync(algt->type,
517 algt->mask));
518 if (err)
519 goto err_drop_cipher;
520
521 ctr = crypto_skcipher_spawn_alg(&ictx->ctr);
522
523 /* Not a stream cipher? */
524 err = -EINVAL;
525 if (ctr->cra_blocksize != 1)
526 goto err_drop_ctr;
527
528 /* We want the real thing! */
529 if (ctr->cra_ablkcipher.ivsize != 16)
530 goto err_drop_ctr;
531
532 err = -ENAMETOOLONG;
533 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
534 "ccm_base(%s,%s)", ctr->cra_driver_name,
535 cipher->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
536 goto err_drop_ctr;
537
538 memcpy(inst->alg.cra_name, full_name, CRYPTO_MAX_ALG_NAME);
539
540 inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
541 inst->alg.cra_flags |= ctr->cra_flags & CRYPTO_ALG_ASYNC;
542 inst->alg.cra_priority = cipher->cra_priority + ctr->cra_priority;
543 inst->alg.cra_blocksize = 1;
544 inst->alg.cra_alignmask = cipher->cra_alignmask | ctr->cra_alignmask |
545 (__alignof__(u32) - 1);
546 inst->alg.cra_type = &crypto_aead_type;
547 inst->alg.cra_aead.ivsize = 16;
548 inst->alg.cra_aead.maxauthsize = 16;
549 inst->alg.cra_ctxsize = sizeof(struct crypto_ccm_ctx);
550 inst->alg.cra_init = crypto_ccm_init_tfm;
551 inst->alg.cra_exit = crypto_ccm_exit_tfm;
552 inst->alg.cra_aead.setkey = crypto_ccm_setkey;
553 inst->alg.cra_aead.setauthsize = crypto_ccm_setauthsize;
554 inst->alg.cra_aead.encrypt = crypto_ccm_encrypt;
555 inst->alg.cra_aead.decrypt = crypto_ccm_decrypt;
556
557out:
558 crypto_mod_put(cipher);
559 return inst;
560
561err_drop_ctr:
562 crypto_drop_skcipher(&ictx->ctr);
563err_drop_cipher:
564 crypto_drop_spawn(&ictx->cipher);
565err_free_inst:
566 kfree(inst);
567out_put_cipher:
568 inst = ERR_PTR(err);
569 goto out;
570}
571
572static struct crypto_instance *crypto_ccm_alloc(struct rtattr **tb)
573{
574 int err;
575 const char *cipher_name;
576 char ctr_name[CRYPTO_MAX_ALG_NAME];
577 char full_name[CRYPTO_MAX_ALG_NAME];
578
579 cipher_name = crypto_attr_alg_name(tb[1]);
580 err = PTR_ERR(cipher_name);
581 if (IS_ERR(cipher_name))
582 return ERR_PTR(err);
583
584 if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
585 cipher_name) >= CRYPTO_MAX_ALG_NAME)
586 return ERR_PTR(-ENAMETOOLONG);
587
588 if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm(%s)", cipher_name) >=
589 CRYPTO_MAX_ALG_NAME)
590 return ERR_PTR(-ENAMETOOLONG);
591
592 return crypto_ccm_alloc_common(tb, full_name, ctr_name, cipher_name);
593}
594
595static void crypto_ccm_free(struct crypto_instance *inst)
596{
597 struct ccm_instance_ctx *ctx = crypto_instance_ctx(inst);
598
599 crypto_drop_spawn(&ctx->cipher);
600 crypto_drop_skcipher(&ctx->ctr);
601 kfree(inst);
602}
603
604static struct crypto_template crypto_ccm_tmpl = {
605 .name = "ccm",
606 .alloc = crypto_ccm_alloc,
607 .free = crypto_ccm_free,
608 .module = THIS_MODULE,
609};
610
611static struct crypto_instance *crypto_ccm_base_alloc(struct rtattr **tb)
612{
613 int err;
614 const char *ctr_name;
615 const char *cipher_name;
616 char full_name[CRYPTO_MAX_ALG_NAME];
617
618 ctr_name = crypto_attr_alg_name(tb[1]);
619 err = PTR_ERR(ctr_name);
620 if (IS_ERR(ctr_name))
621 return ERR_PTR(err);
622
623 cipher_name = crypto_attr_alg_name(tb[2]);
624 err = PTR_ERR(cipher_name);
625 if (IS_ERR(cipher_name))
626 return ERR_PTR(err);
627
628 if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)",
629 ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME)
630 return ERR_PTR(-ENAMETOOLONG);
631
632 return crypto_ccm_alloc_common(tb, full_name, ctr_name, cipher_name);
633}
634
635static struct crypto_template crypto_ccm_base_tmpl = {
636 .name = "ccm_base",
637 .alloc = crypto_ccm_base_alloc,
638 .free = crypto_ccm_free,
639 .module = THIS_MODULE,
640};
641
642static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
643 unsigned int keylen)
644{
645 struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
646 struct crypto_aead *child = ctx->child;
647 int err;
648
649 if (keylen < 3)
650 return -EINVAL;
651
652 keylen -= 3;
653 memcpy(ctx->nonce, key + keylen, 3);
654
655 crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
656 crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
657 CRYPTO_TFM_REQ_MASK);
658 err = crypto_aead_setkey(child, key, keylen);
659 crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
660 CRYPTO_TFM_RES_MASK);
661
662 return err;
663}
664
665static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
666 unsigned int authsize)
667{
668 struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
669
670 switch (authsize) {
671 case 8:
672 case 12:
673 case 16:
674 break;
675 default:
676 return -EINVAL;
677 }
678
679 return crypto_aead_setauthsize(ctx->child, authsize);
680}
681
682static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req)
683{
684 struct aead_request *subreq = aead_request_ctx(req);
685 struct crypto_aead *aead = crypto_aead_reqtfm(req);
686 struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
687 struct crypto_aead *child = ctx->child;
688 u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
689 crypto_aead_alignmask(child) + 1);
690
691 /* L' */
692 iv[0] = 3;
693
694 memcpy(iv + 1, ctx->nonce, 3);
695 memcpy(iv + 4, req->iv, 8);
696
697 aead_request_set_tfm(subreq, child);
698 aead_request_set_callback(subreq, req->base.flags, req->base.complete,
699 req->base.data);
700 aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, iv);
701 aead_request_set_assoc(subreq, req->assoc, req->assoclen);
702
703 return subreq;
704}
705
706static int crypto_rfc4309_encrypt(struct aead_request *req)
707{
708 req = crypto_rfc4309_crypt(req);
709
710 return crypto_aead_encrypt(req);
711}
712
713static int crypto_rfc4309_decrypt(struct aead_request *req)
714{
715 req = crypto_rfc4309_crypt(req);
716
717 return crypto_aead_decrypt(req);
718}
719
720static int crypto_rfc4309_init_tfm(struct crypto_tfm *tfm)
721{
722 struct crypto_instance *inst = (void *)tfm->__crt_alg;
723 struct crypto_aead_spawn *spawn = crypto_instance_ctx(inst);
724 struct crypto_rfc4309_ctx *ctx = crypto_tfm_ctx(tfm);
725 struct crypto_aead *aead;
726 unsigned long align;
727
728 aead = crypto_spawn_aead(spawn);
729 if (IS_ERR(aead))
730 return PTR_ERR(aead);
731
732 ctx->child = aead;
733
734 align = crypto_aead_alignmask(aead);
735 align &= ~(crypto_tfm_ctx_alignment() - 1);
736 tfm->crt_aead.reqsize = sizeof(struct aead_request) +
737 ALIGN(crypto_aead_reqsize(aead),
738 crypto_tfm_ctx_alignment()) +
739 align + 16;
740
741 return 0;
742}
743
744static void crypto_rfc4309_exit_tfm(struct crypto_tfm *tfm)
745{
746 struct crypto_rfc4309_ctx *ctx = crypto_tfm_ctx(tfm);
747
748 crypto_free_aead(ctx->child);
749}
750
751static struct crypto_instance *crypto_rfc4309_alloc(struct rtattr **tb)
752{
753 struct crypto_attr_type *algt;
754 struct crypto_instance *inst;
755 struct crypto_aead_spawn *spawn;
756 struct crypto_alg *alg;
757 const char *ccm_name;
758 int err;
759
760 algt = crypto_get_attr_type(tb);
761 err = PTR_ERR(algt);
762 if (IS_ERR(algt))
763 return ERR_PTR(err);
764
765 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
766 return ERR_PTR(-EINVAL);
767
768 ccm_name = crypto_attr_alg_name(tb[1]);
769 err = PTR_ERR(ccm_name);
770 if (IS_ERR(ccm_name))
771 return ERR_PTR(err);
772
773 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
774 if (!inst)
775 return ERR_PTR(-ENOMEM);
776
777 spawn = crypto_instance_ctx(inst);
778 crypto_set_aead_spawn(spawn, inst);
779 err = crypto_grab_aead(spawn, ccm_name, 0,
780 crypto_requires_sync(algt->type, algt->mask));
781 if (err)
782 goto out_free_inst;
783
784 alg = crypto_aead_spawn_alg(spawn);
785
786 err = -EINVAL;
787
 788	/* The underlying CCM mode must use the full 16-byte (block-sized) IV. */
789 if (alg->cra_aead.ivsize != 16)
790 goto out_drop_alg;
791
792 /* Not a stream cipher? */
793 if (alg->cra_blocksize != 1)
794 goto out_drop_alg;
795
796 err = -ENAMETOOLONG;
797 if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
798 "rfc4309(%s)", alg->cra_name) >= CRYPTO_MAX_ALG_NAME ||
799 snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
800 "rfc4309(%s)", alg->cra_driver_name) >=
801 CRYPTO_MAX_ALG_NAME)
802 goto out_drop_alg;
803
804 inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
805 inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
806 inst->alg.cra_priority = alg->cra_priority;
807 inst->alg.cra_blocksize = 1;
808 inst->alg.cra_alignmask = alg->cra_alignmask;
809 inst->alg.cra_type = &crypto_nivaead_type;
810
811 inst->alg.cra_aead.ivsize = 8;
812 inst->alg.cra_aead.maxauthsize = 16;
813
814 inst->alg.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx);
815
816 inst->alg.cra_init = crypto_rfc4309_init_tfm;
817 inst->alg.cra_exit = crypto_rfc4309_exit_tfm;
818
819 inst->alg.cra_aead.setkey = crypto_rfc4309_setkey;
820 inst->alg.cra_aead.setauthsize = crypto_rfc4309_setauthsize;
821 inst->alg.cra_aead.encrypt = crypto_rfc4309_encrypt;
822 inst->alg.cra_aead.decrypt = crypto_rfc4309_decrypt;
823
824 inst->alg.cra_aead.geniv = "seqiv";
825
826out:
827 return inst;
828
829out_drop_alg:
830 crypto_drop_aead(spawn);
831out_free_inst:
832 kfree(inst);
833 inst = ERR_PTR(err);
834 goto out;
835}
836
837static void crypto_rfc4309_free(struct crypto_instance *inst)
838{
839 crypto_drop_spawn(crypto_instance_ctx(inst));
840 kfree(inst);
841}
842
843static struct crypto_template crypto_rfc4309_tmpl = {
844 .name = "rfc4309",
845 .alloc = crypto_rfc4309_alloc,
846 .free = crypto_rfc4309_free,
847 .module = THIS_MODULE,
848};
849
850static int __init crypto_ccm_module_init(void)
851{
852 int err;
853
854 err = crypto_register_template(&crypto_ccm_base_tmpl);
855 if (err)
856 goto out;
857
858 err = crypto_register_template(&crypto_ccm_tmpl);
859 if (err)
860 goto out_undo_base;
861
862 err = crypto_register_template(&crypto_rfc4309_tmpl);
863 if (err)
864 goto out_undo_ccm;
865
866out:
867 return err;
868
869out_undo_ccm:
870 crypto_unregister_template(&crypto_ccm_tmpl);
871out_undo_base:
872 crypto_unregister_template(&crypto_ccm_base_tmpl);
873 goto out;
874}
875
876static void __exit crypto_ccm_module_exit(void)
877{
878 crypto_unregister_template(&crypto_rfc4309_tmpl);
879 crypto_unregister_template(&crypto_ccm_tmpl);
880 crypto_unregister_template(&crypto_ccm_base_tmpl);
881}
882
883module_init(crypto_ccm_module_init);
884module_exit(crypto_ccm_module_exit);
885
886MODULE_LICENSE("GPL");
887MODULE_DESCRIPTION("Counter with CBC MAC");
888MODULE_ALIAS("ccm_base");
889MODULE_ALIAS("rfc4309");
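
The B_0 block assembled by format_input() and set_msg_len() above can be
reproduced in isolation.  The following standalone sketch (plain C,
hypothetical helper names, not kernel API) shows the RFC 3610 flags byte
and big-endian length encoding for one concrete parameter set:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical helper mirroring format_input(): bit 6 flags the presence
 * of associated data, bits 3-5 encode (M - 2) / 2 for an M-byte auth tag,
 * and bits 0-2 hold L' = L - 1, which the caller already placed in iv[0].
 */
static uint8_t ccm_b0_flags(int have_adata, unsigned int authsize,
			    uint8_t lprime)
{
	return (have_adata ? 64 : 0) | (8 * ((authsize - 2) / 2)) | lprime;
}

int main(void)
{
	uint8_t iv[16] = { 3 };		/* L' = 3, i.e. L = 4, as rfc4309 uses */
	uint8_t b0[16];
	uint32_t msglen = 42;

	memcpy(b0, iv, 16);
	b0[0] = ccm_b0_flags(1, 16, iv[0]);

	/* The message length fills the last L bytes, big endian, exactly
	 * what set_msg_len() does above.
	 */
	for (int i = 0; i < 4; i++)
		b0[15 - i] = (uint8_t)(msglen >> (8 * i));

	printf("B_0: flags 0x%02x, length field %02x %02x %02x %02x\n",
	       b0[0], b0[12], b0[13], b0[14], b0[15]);
	return 0;
}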
diff --git a/crypto/chainiv.c b/crypto/chainiv.c
new file mode 100644
index 000000000000..d17fa0454dc3
--- /dev/null
+++ b/crypto/chainiv.c
@@ -0,0 +1,331 @@
1/*
2 * chainiv: Chain IV Generator
3 *
 4 * Generate IVs simply by using the last block of the previous encryption.
5 * This is mainly useful for CBC with a synchronous algorithm.
6 *
7 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
8 *
9 * This program is free software; you can redistribute it and/or modify it
10 * under the terms of the GNU General Public License as published by the Free
11 * Software Foundation; either version 2 of the License, or (at your option)
12 * any later version.
13 *
14 */
15
16#include <crypto/internal/skcipher.h>
17#include <linux/err.h>
18#include <linux/init.h>
19#include <linux/kernel.h>
20#include <linux/module.h>
21#include <linux/random.h>
22#include <linux/spinlock.h>
23#include <linux/string.h>
24#include <linux/workqueue.h>
25
26enum {
27 CHAINIV_STATE_INUSE = 0,
28};
29
30struct chainiv_ctx {
31 spinlock_t lock;
32 char iv[];
33};
34
35struct async_chainiv_ctx {
36 unsigned long state;
37
38 spinlock_t lock;
39 int err;
40
41 struct crypto_queue queue;
42 struct work_struct postponed;
43
44 char iv[];
45};
46
47static int chainiv_givencrypt(struct skcipher_givcrypt_request *req)
48{
49 struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
50 struct chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
51 struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
52 unsigned int ivsize;
53 int err;
54
55 ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv));
56 ablkcipher_request_set_callback(subreq, req->creq.base.flags &
57 ~CRYPTO_TFM_REQ_MAY_SLEEP,
58 req->creq.base.complete,
59 req->creq.base.data);
60 ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst,
61 req->creq.nbytes, req->creq.info);
62
63 spin_lock_bh(&ctx->lock);
64
65 ivsize = crypto_ablkcipher_ivsize(geniv);
66
67 memcpy(req->giv, ctx->iv, ivsize);
68 memcpy(subreq->info, ctx->iv, ivsize);
69
70 err = crypto_ablkcipher_encrypt(subreq);
71 if (err)
72 goto unlock;
73
74 memcpy(ctx->iv, subreq->info, ivsize);
75
76unlock:
77 spin_unlock_bh(&ctx->lock);
78
79 return err;
80}
81
82static int chainiv_givencrypt_first(struct skcipher_givcrypt_request *req)
83{
84 struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
85 struct chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
86
87 spin_lock_bh(&ctx->lock);
88 if (crypto_ablkcipher_crt(geniv)->givencrypt !=
89 chainiv_givencrypt_first)
90 goto unlock;
91
92 crypto_ablkcipher_crt(geniv)->givencrypt = chainiv_givencrypt;
93 get_random_bytes(ctx->iv, crypto_ablkcipher_ivsize(geniv));
94
95unlock:
96 spin_unlock_bh(&ctx->lock);
97
98 return chainiv_givencrypt(req);
99}
100
101static int chainiv_init_common(struct crypto_tfm *tfm)
102{
103 tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request);
104
105 return skcipher_geniv_init(tfm);
106}
107
108static int chainiv_init(struct crypto_tfm *tfm)
109{
110 struct chainiv_ctx *ctx = crypto_tfm_ctx(tfm);
111
112 spin_lock_init(&ctx->lock);
113
114 return chainiv_init_common(tfm);
115}
116
117static int async_chainiv_schedule_work(struct async_chainiv_ctx *ctx)
118{
119 int queued;
120
121 if (!ctx->queue.qlen) {
122 smp_mb__before_clear_bit();
123 clear_bit(CHAINIV_STATE_INUSE, &ctx->state);
124
125 if (!ctx->queue.qlen ||
126 test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state))
127 goto out;
128 }
129
130 queued = schedule_work(&ctx->postponed);
131 BUG_ON(!queued);
132
133out:
134 return ctx->err;
135}
136
137static int async_chainiv_postpone_request(struct skcipher_givcrypt_request *req)
138{
139 struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
140 struct async_chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
141 int err;
142
143 spin_lock_bh(&ctx->lock);
144 err = skcipher_enqueue_givcrypt(&ctx->queue, req);
145 spin_unlock_bh(&ctx->lock);
146
147 if (test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state))
148 return err;
149
150 ctx->err = err;
151 return async_chainiv_schedule_work(ctx);
152}
153
154static int async_chainiv_givencrypt_tail(struct skcipher_givcrypt_request *req)
155{
156 struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
157 struct async_chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
158 struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
159 unsigned int ivsize = crypto_ablkcipher_ivsize(geniv);
160
161 memcpy(req->giv, ctx->iv, ivsize);
162 memcpy(subreq->info, ctx->iv, ivsize);
163
164 ctx->err = crypto_ablkcipher_encrypt(subreq);
165 if (ctx->err)
166 goto out;
167
168 memcpy(ctx->iv, subreq->info, ivsize);
169
170out:
171 return async_chainiv_schedule_work(ctx);
172}
173
174static int async_chainiv_givencrypt(struct skcipher_givcrypt_request *req)
175{
176 struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
177 struct async_chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
178 struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
179
180 ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv));
181 ablkcipher_request_set_callback(subreq, req->creq.base.flags,
182 req->creq.base.complete,
183 req->creq.base.data);
184 ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst,
185 req->creq.nbytes, req->creq.info);
186
187 if (test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state))
188 goto postpone;
189
190 if (ctx->queue.qlen) {
191 clear_bit(CHAINIV_STATE_INUSE, &ctx->state);
192 goto postpone;
193 }
194
195 return async_chainiv_givencrypt_tail(req);
196
197postpone:
198 return async_chainiv_postpone_request(req);
199}
200
201static int async_chainiv_givencrypt_first(struct skcipher_givcrypt_request *req)
202{
203 struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
204 struct async_chainiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
205
206 if (test_and_set_bit(CHAINIV_STATE_INUSE, &ctx->state))
207 goto out;
208
209 if (crypto_ablkcipher_crt(geniv)->givencrypt !=
210 async_chainiv_givencrypt_first)
211 goto unlock;
212
213 crypto_ablkcipher_crt(geniv)->givencrypt = async_chainiv_givencrypt;
214 get_random_bytes(ctx->iv, crypto_ablkcipher_ivsize(geniv));
215
216unlock:
217 clear_bit(CHAINIV_STATE_INUSE, &ctx->state);
218
219out:
220 return async_chainiv_givencrypt(req);
221}
222
223static void async_chainiv_do_postponed(struct work_struct *work)
224{
225 struct async_chainiv_ctx *ctx = container_of(work,
226 struct async_chainiv_ctx,
227 postponed);
228 struct skcipher_givcrypt_request *req;
229 struct ablkcipher_request *subreq;
230
231 /* Only handle one request at a time to avoid hogging keventd. */
232 spin_lock_bh(&ctx->lock);
233 req = skcipher_dequeue_givcrypt(&ctx->queue);
234 spin_unlock_bh(&ctx->lock);
235
236 if (!req) {
237 async_chainiv_schedule_work(ctx);
238 return;
239 }
240
241 subreq = skcipher_givcrypt_reqctx(req);
242 subreq->base.flags |= CRYPTO_TFM_REQ_MAY_SLEEP;
243
244 async_chainiv_givencrypt_tail(req);
245}
246
247static int async_chainiv_init(struct crypto_tfm *tfm)
248{
249 struct async_chainiv_ctx *ctx = crypto_tfm_ctx(tfm);
250
251 spin_lock_init(&ctx->lock);
252
253 crypto_init_queue(&ctx->queue, 100);
254 INIT_WORK(&ctx->postponed, async_chainiv_do_postponed);
255
256 return chainiv_init_common(tfm);
257}
258
259static void async_chainiv_exit(struct crypto_tfm *tfm)
260{
261 struct async_chainiv_ctx *ctx = crypto_tfm_ctx(tfm);
262
263 BUG_ON(test_bit(CHAINIV_STATE_INUSE, &ctx->state) || ctx->queue.qlen);
264
265 skcipher_geniv_exit(tfm);
266}
267
268static struct crypto_template chainiv_tmpl;
269
270static struct crypto_instance *chainiv_alloc(struct rtattr **tb)
271{
272 struct crypto_attr_type *algt;
273 struct crypto_instance *inst;
274 int err;
275
276 algt = crypto_get_attr_type(tb);
277 err = PTR_ERR(algt);
278 if (IS_ERR(algt))
279 return ERR_PTR(err);
280
281 inst = skcipher_geniv_alloc(&chainiv_tmpl, tb, 0, 0);
282 if (IS_ERR(inst))
283 goto out;
284
285 inst->alg.cra_ablkcipher.givencrypt = chainiv_givencrypt_first;
286
287 inst->alg.cra_init = chainiv_init;
288 inst->alg.cra_exit = skcipher_geniv_exit;
289
290 inst->alg.cra_ctxsize = sizeof(struct chainiv_ctx);
291
292 if (!crypto_requires_sync(algt->type, algt->mask)) {
293 inst->alg.cra_flags |= CRYPTO_ALG_ASYNC;
294
295 inst->alg.cra_ablkcipher.givencrypt =
296 async_chainiv_givencrypt_first;
297
298 inst->alg.cra_init = async_chainiv_init;
299 inst->alg.cra_exit = async_chainiv_exit;
300
301 inst->alg.cra_ctxsize = sizeof(struct async_chainiv_ctx);
302 }
303
304 inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize;
305
306out:
307 return inst;
308}
309
310static struct crypto_template chainiv_tmpl = {
311 .name = "chainiv",
312 .alloc = chainiv_alloc,
313 .free = skcipher_geniv_free,
314 .module = THIS_MODULE,
315};
316
317static int __init chainiv_module_init(void)
318{
319 return crypto_register_template(&chainiv_tmpl);
320}
321
322static void __exit chainiv_module_exit(void)
323{
324 crypto_unregister_template(&chainiv_tmpl);
325}
326
327module_init(chainiv_module_init);
328module_exit(chainiv_module_exit);
329
330MODULE_LICENSE("GPL");
331MODULE_DESCRIPTION("Chain IV Generator");
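
The chaining scheme above amounts to handing each request the last
ciphertext block of the previous one.  A minimal self-contained sketch,
with a toy XOR "block cipher" standing in for the real CBC transform and
purely hypothetical names:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BS 16

/* Toy single-block "cipher" (XOR with the key) so the example runs;
 * chainiv itself delegates to the underlying CBC implementation.
 */
static void toy_encrypt(const uint8_t key[BS], uint8_t blk[BS])
{
	for (int i = 0; i < BS; i++)
		blk[i] ^= key[i];
}

/* CBC over whole blocks; like the kernel's blkcipher walk, the final
 * ciphertext block is left in iv[] on return.
 */
static void toy_cbc(const uint8_t key[BS], uint8_t iv[BS],
		    const uint8_t *src, uint8_t *dst, size_t blocks)
{
	for (size_t b = 0; b < blocks; b++) {
		for (int i = 0; i < BS; i++)
			iv[i] ^= src[b * BS + i];
		toy_encrypt(key, iv);
		memcpy(dst + b * BS, iv, BS);
	}
}

int main(void)
{
	uint8_t key[BS] = { 0xaa }, chain[BS] = { 1, 2, 3 };	/* seeded once */
	uint8_t msg[2 * BS] = "two blocks of plaintext here..";
	uint8_t ct[2 * BS], giv[BS];

	for (int req = 0; req < 3; req++) {
		memcpy(giv, chain, BS);		 /* IV handed out for this request */
		toy_cbc(key, chain, msg, ct, 2); /* chain now holds the last C block */
		printf("request %d used IV %02x%02x...\n", req, giv[0], giv[1]);
	}
	return 0;
}

This mirrors chainiv_givencrypt(): ctx->iv is copied into req->giv and into
the sub-request's IV, and the value the cipher leaves behind becomes the IV
for the next request.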
diff --git a/crypto/cryptd.c b/crypto/cryptd.c
index 8bf2da835f7b..074298f2f8e3 100644
--- a/crypto/cryptd.c
+++ b/crypto/cryptd.c
@@ -228,7 +228,7 @@ static struct crypto_instance *cryptd_alloc_blkcipher(
228 struct crypto_alg *alg; 228 struct crypto_alg *alg;
229 229
230 alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER, 230 alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER,
231 CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC); 231 CRYPTO_ALG_TYPE_MASK);
232 if (IS_ERR(alg)) 232 if (IS_ERR(alg))
233 return ERR_PTR(PTR_ERR(alg)); 233 return ERR_PTR(PTR_ERR(alg));
234 234
@@ -236,13 +236,15 @@ static struct crypto_instance *cryptd_alloc_blkcipher(
236 if (IS_ERR(inst)) 236 if (IS_ERR(inst))
237 goto out_put_alg; 237 goto out_put_alg;
238 238
239 inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_ASYNC; 239 inst->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
240 inst->alg.cra_type = &crypto_ablkcipher_type; 240 inst->alg.cra_type = &crypto_ablkcipher_type;
241 241
242 inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize; 242 inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
243 inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize; 243 inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
244 inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize; 244 inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;
245 245
246 inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;
247
246 inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx); 248 inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);
247 249
248 inst->alg.cra_init = cryptd_blkcipher_init_tfm; 250 inst->alg.cra_init = cryptd_blkcipher_init_tfm;
diff --git a/crypto/crypto_null.c b/crypto/crypto_null.c
index 29f77477d701..ff7b3de1bcfd 100644
--- a/crypto/crypto_null.c
+++ b/crypto/crypto_null.c
@@ -16,15 +16,17 @@
16 * (at your option) any later version. 16 * (at your option) any later version.
17 * 17 *
18 */ 18 */
19
20#include <crypto/internal/skcipher.h>
19#include <linux/init.h> 21#include <linux/init.h>
20#include <linux/module.h> 22#include <linux/module.h>
21#include <linux/mm.h> 23#include <linux/mm.h>
22#include <linux/crypto.h>
23#include <linux/string.h> 24#include <linux/string.h>
24 25
25#define NULL_KEY_SIZE 0 26#define NULL_KEY_SIZE 0
26#define NULL_BLOCK_SIZE 1 27#define NULL_BLOCK_SIZE 1
27#define NULL_DIGEST_SIZE 0 28#define NULL_DIGEST_SIZE 0
29#define NULL_IV_SIZE 0
28 30
29static int null_compress(struct crypto_tfm *tfm, const u8 *src, 31static int null_compress(struct crypto_tfm *tfm, const u8 *src,
30 unsigned int slen, u8 *dst, unsigned int *dlen) 32 unsigned int slen, u8 *dst, unsigned int *dlen)
@@ -55,6 +57,26 @@ static void null_crypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
55 memcpy(dst, src, NULL_BLOCK_SIZE); 57 memcpy(dst, src, NULL_BLOCK_SIZE);
56} 58}
57 59
60static int skcipher_null_crypt(struct blkcipher_desc *desc,
61 struct scatterlist *dst,
62 struct scatterlist *src, unsigned int nbytes)
63{
64 struct blkcipher_walk walk;
65 int err;
66
67 blkcipher_walk_init(&walk, dst, src, nbytes);
68 err = blkcipher_walk_virt(desc, &walk);
69
70 while (walk.nbytes) {
71 if (walk.src.virt.addr != walk.dst.virt.addr)
72 memcpy(walk.dst.virt.addr, walk.src.virt.addr,
73 walk.nbytes);
74 err = blkcipher_walk_done(desc, &walk, 0);
75 }
76
77 return err;
78}
79
58static struct crypto_alg compress_null = { 80static struct crypto_alg compress_null = {
59 .cra_name = "compress_null", 81 .cra_name = "compress_null",
60 .cra_flags = CRYPTO_ALG_TYPE_COMPRESS, 82 .cra_flags = CRYPTO_ALG_TYPE_COMPRESS,
@@ -76,6 +98,7 @@ static struct crypto_alg digest_null = {
76 .cra_list = LIST_HEAD_INIT(digest_null.cra_list), 98 .cra_list = LIST_HEAD_INIT(digest_null.cra_list),
77 .cra_u = { .digest = { 99 .cra_u = { .digest = {
78 .dia_digestsize = NULL_DIGEST_SIZE, 100 .dia_digestsize = NULL_DIGEST_SIZE,
101 .dia_setkey = null_setkey,
79 .dia_init = null_init, 102 .dia_init = null_init,
80 .dia_update = null_update, 103 .dia_update = null_update,
81 .dia_final = null_final } } 104 .dia_final = null_final } }
@@ -96,6 +119,25 @@ static struct crypto_alg cipher_null = {
96 .cia_decrypt = null_crypt } } 119 .cia_decrypt = null_crypt } }
97}; 120};
98 121
122static struct crypto_alg skcipher_null = {
123 .cra_name = "ecb(cipher_null)",
124 .cra_driver_name = "ecb-cipher_null",
125 .cra_priority = 100,
126 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
127 .cra_blocksize = NULL_BLOCK_SIZE,
128 .cra_type = &crypto_blkcipher_type,
129 .cra_ctxsize = 0,
130 .cra_module = THIS_MODULE,
131 .cra_list = LIST_HEAD_INIT(skcipher_null.cra_list),
132 .cra_u = { .blkcipher = {
133 .min_keysize = NULL_KEY_SIZE,
134 .max_keysize = NULL_KEY_SIZE,
135 .ivsize = NULL_IV_SIZE,
136 .setkey = null_setkey,
137 .encrypt = skcipher_null_crypt,
138 .decrypt = skcipher_null_crypt } }
139};
140
99MODULE_ALIAS("compress_null"); 141MODULE_ALIAS("compress_null");
100MODULE_ALIAS("digest_null"); 142MODULE_ALIAS("digest_null");
101MODULE_ALIAS("cipher_null"); 143MODULE_ALIAS("cipher_null");
@@ -108,27 +150,35 @@ static int __init init(void)
108 if (ret < 0) 150 if (ret < 0)
109 goto out; 151 goto out;
110 152
153 ret = crypto_register_alg(&skcipher_null);
154 if (ret < 0)
155 goto out_unregister_cipher;
156
111 ret = crypto_register_alg(&digest_null); 157 ret = crypto_register_alg(&digest_null);
112 if (ret < 0) { 158 if (ret < 0)
113 crypto_unregister_alg(&cipher_null); 159 goto out_unregister_skcipher;
114 goto out;
115 }
116 160
117 ret = crypto_register_alg(&compress_null); 161 ret = crypto_register_alg(&compress_null);
118 if (ret < 0) { 162 if (ret < 0)
119 crypto_unregister_alg(&digest_null); 163 goto out_unregister_digest;
120 crypto_unregister_alg(&cipher_null);
121 goto out;
122 }
123 164
124out: 165out:
125 return ret; 166 return ret;
167
168out_unregister_digest:
169 crypto_unregister_alg(&digest_null);
170out_unregister_skcipher:
171 crypto_unregister_alg(&skcipher_null);
172out_unregister_cipher:
173 crypto_unregister_alg(&cipher_null);
174 goto out;
126} 175}
127 176
128static void __exit fini(void) 177static void __exit fini(void)
129{ 178{
130 crypto_unregister_alg(&compress_null); 179 crypto_unregister_alg(&compress_null);
131 crypto_unregister_alg(&digest_null); 180 crypto_unregister_alg(&digest_null);
181 crypto_unregister_alg(&skcipher_null);
132 crypto_unregister_alg(&cipher_null); 182 crypto_unregister_alg(&cipher_null);
133} 183}
134 184
diff --git a/crypto/ctr.c b/crypto/ctr.c
new file mode 100644
index 000000000000..2d7425f0e7b8
--- /dev/null
+++ b/crypto/ctr.c
@@ -0,0 +1,422 @@
1/*
2 * CTR: Counter mode
3 *
4 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License as published by the Free
8 * Software Foundation; either version 2 of the License, or (at your option)
9 * any later version.
10 *
11 */
12
13#include <crypto/algapi.h>
14#include <crypto/ctr.h>
15#include <linux/err.h>
16#include <linux/init.h>
17#include <linux/kernel.h>
18#include <linux/module.h>
19#include <linux/random.h>
20#include <linux/scatterlist.h>
21#include <linux/slab.h>
22
23struct crypto_ctr_ctx {
24 struct crypto_cipher *child;
25};
26
27struct crypto_rfc3686_ctx {
28 struct crypto_blkcipher *child;
29 u8 nonce[CTR_RFC3686_NONCE_SIZE];
30};
31
32static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key,
33 unsigned int keylen)
34{
35 struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(parent);
36 struct crypto_cipher *child = ctx->child;
37 int err;
38
39 crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
40 crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
41 CRYPTO_TFM_REQ_MASK);
42 err = crypto_cipher_setkey(child, key, keylen);
43 crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
44 CRYPTO_TFM_RES_MASK);
45
46 return err;
47}
48
49static void crypto_ctr_crypt_final(struct blkcipher_walk *walk,
50 struct crypto_cipher *tfm)
51{
52 unsigned int bsize = crypto_cipher_blocksize(tfm);
53 unsigned long alignmask = crypto_cipher_alignmask(tfm);
54 u8 *ctrblk = walk->iv;
55 u8 tmp[bsize + alignmask];
56 u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
57 u8 *src = walk->src.virt.addr;
58 u8 *dst = walk->dst.virt.addr;
59 unsigned int nbytes = walk->nbytes;
60
61 crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
62 crypto_xor(keystream, src, nbytes);
63 memcpy(dst, keystream, nbytes);
64
65 crypto_inc(ctrblk, bsize);
66}
67
68static int crypto_ctr_crypt_segment(struct blkcipher_walk *walk,
69 struct crypto_cipher *tfm)
70{
71 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
72 crypto_cipher_alg(tfm)->cia_encrypt;
73 unsigned int bsize = crypto_cipher_blocksize(tfm);
74 u8 *ctrblk = walk->iv;
75 u8 *src = walk->src.virt.addr;
76 u8 *dst = walk->dst.virt.addr;
77 unsigned int nbytes = walk->nbytes;
78
79 do {
80 /* create keystream */
81 fn(crypto_cipher_tfm(tfm), dst, ctrblk);
82 crypto_xor(dst, src, bsize);
83
84 /* increment counter in counterblock */
85 crypto_inc(ctrblk, bsize);
86
87 src += bsize;
88 dst += bsize;
89 } while ((nbytes -= bsize) >= bsize);
90
91 return nbytes;
92}
93
94static int crypto_ctr_crypt_inplace(struct blkcipher_walk *walk,
95 struct crypto_cipher *tfm)
96{
97 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
98 crypto_cipher_alg(tfm)->cia_encrypt;
99 unsigned int bsize = crypto_cipher_blocksize(tfm);
100 unsigned long alignmask = crypto_cipher_alignmask(tfm);
101 unsigned int nbytes = walk->nbytes;
102 u8 *ctrblk = walk->iv;
103 u8 *src = walk->src.virt.addr;
104 u8 tmp[bsize + alignmask];
105 u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
106
107 do {
108 /* create keystream */
109 fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
110 crypto_xor(src, keystream, bsize);
111
112 /* increment counter in counterblock */
113 crypto_inc(ctrblk, bsize);
114
115 src += bsize;
116 } while ((nbytes -= bsize) >= bsize);
117
118 return nbytes;
119}
120
121static int crypto_ctr_crypt(struct blkcipher_desc *desc,
122 struct scatterlist *dst, struct scatterlist *src,
123 unsigned int nbytes)
124{
125 struct blkcipher_walk walk;
126 struct crypto_blkcipher *tfm = desc->tfm;
127 struct crypto_ctr_ctx *ctx = crypto_blkcipher_ctx(tfm);
128 struct crypto_cipher *child = ctx->child;
129 unsigned int bsize = crypto_cipher_blocksize(child);
130 int err;
131
132 blkcipher_walk_init(&walk, dst, src, nbytes);
133 err = blkcipher_walk_virt_block(desc, &walk, bsize);
134
135 while (walk.nbytes >= bsize) {
136 if (walk.src.virt.addr == walk.dst.virt.addr)
137 nbytes = crypto_ctr_crypt_inplace(&walk, child);
138 else
139 nbytes = crypto_ctr_crypt_segment(&walk, child);
140
141 err = blkcipher_walk_done(desc, &walk, nbytes);
142 }
143
144 if (walk.nbytes) {
145 crypto_ctr_crypt_final(&walk, child);
146 err = blkcipher_walk_done(desc, &walk, 0);
147 }
148
149 return err;
150}
151
152static int crypto_ctr_init_tfm(struct crypto_tfm *tfm)
153{
154 struct crypto_instance *inst = (void *)tfm->__crt_alg;
155 struct crypto_spawn *spawn = crypto_instance_ctx(inst);
156 struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
157 struct crypto_cipher *cipher;
158
159 cipher = crypto_spawn_cipher(spawn);
160 if (IS_ERR(cipher))
161 return PTR_ERR(cipher);
162
163 ctx->child = cipher;
164
165 return 0;
166}
167
168static void crypto_ctr_exit_tfm(struct crypto_tfm *tfm)
169{
170 struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
171
172 crypto_free_cipher(ctx->child);
173}
174
175static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
176{
177 struct crypto_instance *inst;
178 struct crypto_alg *alg;
179 int err;
180
181 err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
182 if (err)
183 return ERR_PTR(err);
184
185 alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER,
186 CRYPTO_ALG_TYPE_MASK);
187 if (IS_ERR(alg))
188 return ERR_PTR(PTR_ERR(alg));
189
190 /* Block size must be >= 4 bytes. */
191 err = -EINVAL;
192 if (alg->cra_blocksize < 4)
193 goto out_put_alg;
194
 195	/* The block size must also be a multiple of 4, or crypto_inc's
 	 * u32-wide stepping would be misaligned. */
196 if (alg->cra_blocksize % 4)
197 goto out_put_alg;
198
199 inst = crypto_alloc_instance("ctr", alg);
200 if (IS_ERR(inst))
201 goto out;
202
203 inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
204 inst->alg.cra_priority = alg->cra_priority;
205 inst->alg.cra_blocksize = 1;
206 inst->alg.cra_alignmask = alg->cra_alignmask | (__alignof__(u32) - 1);
207 inst->alg.cra_type = &crypto_blkcipher_type;
208
209 inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
210 inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
211 inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;
212
213 inst->alg.cra_ctxsize = sizeof(struct crypto_ctr_ctx);
214
215 inst->alg.cra_init = crypto_ctr_init_tfm;
216 inst->alg.cra_exit = crypto_ctr_exit_tfm;
217
218 inst->alg.cra_blkcipher.setkey = crypto_ctr_setkey;
219 inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt;
220 inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt;
221
222out:
223 crypto_mod_put(alg);
224 return inst;
225
226out_put_alg:
227 inst = ERR_PTR(err);
228 goto out;
229}
230
231static void crypto_ctr_free(struct crypto_instance *inst)
232{
233 crypto_drop_spawn(crypto_instance_ctx(inst));
234 kfree(inst);
235}
236
237static struct crypto_template crypto_ctr_tmpl = {
238 .name = "ctr",
239 .alloc = crypto_ctr_alloc,
240 .free = crypto_ctr_free,
241 .module = THIS_MODULE,
242};
243
244static int crypto_rfc3686_setkey(struct crypto_tfm *parent, const u8 *key,
245 unsigned int keylen)
246{
247 struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(parent);
248 struct crypto_blkcipher *child = ctx->child;
249 int err;
250
 251	/* the nonce is stored in the last CTR_RFC3686_NONCE_SIZE bytes of the key */
252 if (keylen < CTR_RFC3686_NONCE_SIZE)
253 return -EINVAL;
254
255 memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
256 CTR_RFC3686_NONCE_SIZE);
257
258 keylen -= CTR_RFC3686_NONCE_SIZE;
259
260 crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
261 crypto_blkcipher_set_flags(child, crypto_tfm_get_flags(parent) &
262 CRYPTO_TFM_REQ_MASK);
263 err = crypto_blkcipher_setkey(child, key, keylen);
264 crypto_tfm_set_flags(parent, crypto_blkcipher_get_flags(child) &
265 CRYPTO_TFM_RES_MASK);
266
267 return err;
268}
269
270static int crypto_rfc3686_crypt(struct blkcipher_desc *desc,
271 struct scatterlist *dst,
272 struct scatterlist *src, unsigned int nbytes)
273{
274 struct crypto_blkcipher *tfm = desc->tfm;
275 struct crypto_rfc3686_ctx *ctx = crypto_blkcipher_ctx(tfm);
276 struct crypto_blkcipher *child = ctx->child;
277 unsigned long alignmask = crypto_blkcipher_alignmask(tfm);
278 u8 ivblk[CTR_RFC3686_BLOCK_SIZE + alignmask];
279 u8 *iv = PTR_ALIGN(ivblk + 0, alignmask + 1);
280 u8 *info = desc->info;
281 int err;
282
283 /* set up counter block */
284 memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
285 memcpy(iv + CTR_RFC3686_NONCE_SIZE, info, CTR_RFC3686_IV_SIZE);
286
287 /* initialize counter portion of counter block */
288 *(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
289 cpu_to_be32(1);
290
291 desc->tfm = child;
292 desc->info = iv;
293 err = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
294 desc->tfm = tfm;
295 desc->info = info;
296
297 return err;
298}
299
300static int crypto_rfc3686_init_tfm(struct crypto_tfm *tfm)
301{
302 struct crypto_instance *inst = (void *)tfm->__crt_alg;
303 struct crypto_spawn *spawn = crypto_instance_ctx(inst);
304 struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm);
305 struct crypto_blkcipher *cipher;
306
307 cipher = crypto_spawn_blkcipher(spawn);
308 if (IS_ERR(cipher))
309 return PTR_ERR(cipher);
310
311 ctx->child = cipher;
312
313 return 0;
314}
315
316static void crypto_rfc3686_exit_tfm(struct crypto_tfm *tfm)
317{
318 struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm);
319
320 crypto_free_blkcipher(ctx->child);
321}
322
323static struct crypto_instance *crypto_rfc3686_alloc(struct rtattr **tb)
324{
325 struct crypto_instance *inst;
326 struct crypto_alg *alg;
327 int err;
328
329 err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
330 if (err)
331 return ERR_PTR(err);
332
333 alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_BLKCIPHER,
334 CRYPTO_ALG_TYPE_MASK);
335 err = PTR_ERR(alg);
336 if (IS_ERR(alg))
337 return ERR_PTR(err);
338
 339	/* The underlying CTR mode must use the full 16-byte counter block as its IV. */
340 err = -EINVAL;
341 if (alg->cra_blkcipher.ivsize != CTR_RFC3686_BLOCK_SIZE)
342 goto out_put_alg;
343
344 /* Not a stream cipher? */
345 if (alg->cra_blocksize != 1)
346 goto out_put_alg;
347
348 inst = crypto_alloc_instance("rfc3686", alg);
349 if (IS_ERR(inst))
350 goto out;
351
352 inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
353 inst->alg.cra_priority = alg->cra_priority;
354 inst->alg.cra_blocksize = 1;
355 inst->alg.cra_alignmask = alg->cra_alignmask;
356 inst->alg.cra_type = &crypto_blkcipher_type;
357
358 inst->alg.cra_blkcipher.ivsize = CTR_RFC3686_IV_SIZE;
359 inst->alg.cra_blkcipher.min_keysize = alg->cra_blkcipher.min_keysize
360 + CTR_RFC3686_NONCE_SIZE;
361 inst->alg.cra_blkcipher.max_keysize = alg->cra_blkcipher.max_keysize
362 + CTR_RFC3686_NONCE_SIZE;
363
364 inst->alg.cra_blkcipher.geniv = "seqiv";
365
366 inst->alg.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);
367
368 inst->alg.cra_init = crypto_rfc3686_init_tfm;
369 inst->alg.cra_exit = crypto_rfc3686_exit_tfm;
370
371 inst->alg.cra_blkcipher.setkey = crypto_rfc3686_setkey;
372 inst->alg.cra_blkcipher.encrypt = crypto_rfc3686_crypt;
373 inst->alg.cra_blkcipher.decrypt = crypto_rfc3686_crypt;
374
375out:
376 crypto_mod_put(alg);
377 return inst;
378
379out_put_alg:
380 inst = ERR_PTR(err);
381 goto out;
382}
383
384static struct crypto_template crypto_rfc3686_tmpl = {
385 .name = "rfc3686",
386 .alloc = crypto_rfc3686_alloc,
387 .free = crypto_ctr_free,
388 .module = THIS_MODULE,
389};
390
391static int __init crypto_ctr_module_init(void)
392{
393 int err;
394
395 err = crypto_register_template(&crypto_ctr_tmpl);
396 if (err)
397 goto out;
398
399 err = crypto_register_template(&crypto_rfc3686_tmpl);
400 if (err)
401 goto out_drop_ctr;
402
403out:
404 return err;
405
406out_drop_ctr:
407 crypto_unregister_template(&crypto_ctr_tmpl);
408 goto out;
409}
410
411static void __exit crypto_ctr_module_exit(void)
412{
413 crypto_unregister_template(&crypto_rfc3686_tmpl);
414 crypto_unregister_template(&crypto_ctr_tmpl);
415}
416
417module_init(crypto_ctr_module_init);
418module_exit(crypto_ctr_module_exit);
419
420MODULE_LICENSE("GPL");
421MODULE_DESCRIPTION("CTR Counter block mode");
422MODULE_ALIAS("rfc3686");
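
The keystream construction in crypto_ctr_crypt_segment() and
crypto_ctr_crypt_final() above reduces to: encrypt the counter block, XOR
the result into the data, increment the counter.  A minimal self-contained
sketch, with a toy XOR cipher standing in for the spawned block cipher and
purely hypothetical names:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BS 16

/* Stand-in for the spawned cipher's cia_encrypt. */
static void toy_encrypt(const uint8_t key[BS], uint8_t out[BS],
			const uint8_t in[BS])
{
	for (int i = 0; i < BS; i++)
		out[i] = in[i] ^ key[i];
}

/* Big-endian increment of the counter block, the job crypto_inc() does. */
static void ctr_inc(uint8_t ctrblk[BS])
{
	for (int i = BS - 1; i >= 0 && ++ctrblk[i] == 0; i--)
		;
}

/* Encrypt the counter to get a keystream block, XOR it into the data,
 * bump the counter; a tail shorter than one block is handled the same
 * way crypto_ctr_crypt_final() handles it.  Decryption is the same call.
 */
static void toy_ctr(const uint8_t key[BS], uint8_t ctrblk[BS],
		    const uint8_t *src, uint8_t *dst, size_t len)
{
	uint8_t ks[BS];

	while (len) {
		size_t n = len < BS ? len : BS;

		toy_encrypt(key, ks, ctrblk);
		for (size_t i = 0; i < n; i++)
			dst[i] = src[i] ^ ks[i];
		ctr_inc(ctrblk);
		src += n;
		dst += n;
		len -= n;
	}
}

int main(void)
{
	uint8_t key[BS] = { 0x2b }, ctr[BS] = { [BS - 1] = 1 };	/* rfc3686 starts at 1 */
	const uint8_t msg[20] = "ctr mode round trip";
	uint8_t ct[20], pt[20];

	toy_ctr(key, ctr, msg, ct, sizeof(msg));	/* encrypt */

	memset(ctr, 0, BS);
	ctr[BS - 1] = 1;				/* same starting counter */
	toy_ctr(key, ctr, ct, pt, sizeof(ct));		/* decrypt */
	printf("%s\n", pt);				/* original message */
	return 0;
}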
diff --git a/crypto/des_generic.c b/crypto/des_generic.c
index 59966d14b8e0..355ecb71cb0d 100644
--- a/crypto/des_generic.c
+++ b/crypto/des_generic.c
@@ -20,13 +20,7 @@
20#include <linux/crypto.h> 20#include <linux/crypto.h>
21#include <linux/types.h> 21#include <linux/types.h>
22 22
23#define DES_KEY_SIZE 8 23#include <crypto/des.h>
24#define DES_EXPKEY_WORDS 32
25#define DES_BLOCK_SIZE 8
26
27#define DES3_EDE_KEY_SIZE (3 * DES_KEY_SIZE)
28#define DES3_EDE_EXPKEY_WORDS (3 * DES_EXPKEY_WORDS)
29#define DES3_EDE_BLOCK_SIZE DES_BLOCK_SIZE
30 24
31#define ROL(x, r) ((x) = rol32((x), (r))) 25#define ROL(x, r) ((x) = rol32((x), (r)))
32#define ROR(x, r) ((x) = ror32((x), (r))) 26#define ROR(x, r) ((x) = ror32((x), (r)))
@@ -634,7 +628,7 @@ static const u32 S8[64] = {
634 * Choice 1 has operated on the key. 628 * Choice 1 has operated on the key.
635 * 629 *
636 */ 630 */
637static unsigned long ekey(u32 *pe, const u8 *k) 631unsigned long des_ekey(u32 *pe, const u8 *k)
638{ 632{
639 /* K&R: long is at least 32 bits */ 633 /* K&R: long is at least 32 bits */
640 unsigned long a, b, c, d, w; 634 unsigned long a, b, c, d, w;
@@ -709,6 +703,7 @@ static unsigned long ekey(u32 *pe, const u8 *k)
709 /* Zero if weak key */ 703 /* Zero if weak key */
710 return w; 704 return w;
711} 705}
706EXPORT_SYMBOL_GPL(des_ekey);
712 707
713/* 708/*
714 * Decryption key expansion 709 * Decryption key expansion
@@ -792,7 +787,7 @@ static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
792 int ret; 787 int ret;
793 788
794 /* Expand to tmp */ 789 /* Expand to tmp */
795 ret = ekey(tmp, key); 790 ret = des_ekey(tmp, key);
796 791
797 if (unlikely(ret == 0) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) { 792 if (unlikely(ret == 0) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
798 *flags |= CRYPTO_TFM_RES_WEAK_KEY; 793 *flags |= CRYPTO_TFM_RES_WEAK_KEY;
@@ -879,9 +874,9 @@ static int des3_ede_setkey(struct crypto_tfm *tfm, const u8 *key,
879 return -EINVAL; 874 return -EINVAL;
880 } 875 }
881 876
882 ekey(expkey, key); expkey += DES_EXPKEY_WORDS; key += DES_KEY_SIZE; 877 des_ekey(expkey, key); expkey += DES_EXPKEY_WORDS; key += DES_KEY_SIZE;
883 dkey(expkey, key); expkey += DES_EXPKEY_WORDS; key += DES_KEY_SIZE; 878 dkey(expkey, key); expkey += DES_EXPKEY_WORDS; key += DES_KEY_SIZE;
884 ekey(expkey, key); 879 des_ekey(expkey, key);
885 880
886 return 0; 881 return 0;
887} 882}
diff --git a/crypto/digest.c b/crypto/digest.c
index 8871dec8cae7..6fd43bddd545 100644
--- a/crypto/digest.c
+++ b/crypto/digest.c
@@ -12,6 +12,7 @@
12 * 12 *
13 */ 13 */
14 14
15#include <crypto/scatterwalk.h>
15#include <linux/mm.h> 16#include <linux/mm.h>
16#include <linux/errno.h> 17#include <linux/errno.h>
17#include <linux/hardirq.h> 18#include <linux/hardirq.h>
@@ -20,9 +21,6 @@
20#include <linux/module.h> 21#include <linux/module.h>
21#include <linux/scatterlist.h> 22#include <linux/scatterlist.h>
22 23
23#include "internal.h"
24#include "scatterwalk.h"
25
26static int init(struct hash_desc *desc) 24static int init(struct hash_desc *desc)
27{ 25{
28 struct crypto_tfm *tfm = crypto_hash_tfm(desc->tfm); 26 struct crypto_tfm *tfm = crypto_hash_tfm(desc->tfm);
diff --git a/crypto/eseqiv.c b/crypto/eseqiv.c
new file mode 100644
index 000000000000..eb90d27ae118
--- /dev/null
+++ b/crypto/eseqiv.c
@@ -0,0 +1,264 @@
1/*
2 * eseqiv: Encrypted Sequence Number IV Generator
3 *
 4 * This generator produces an IV by xoring a sequence number with a salt
 5 * and then encrypting the result with the same key as is used to encrypt
 6 * the plaintext. This algorithm requires that the block size be equal
7 * to the IV size. It is mainly useful for CBC.
8 *
9 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
10 *
11 * This program is free software; you can redistribute it and/or modify it
12 * under the terms of the GNU General Public License as published by the Free
13 * Software Foundation; either version 2 of the License, or (at your option)
14 * any later version.
15 *
16 */
17
18#include <crypto/internal/skcipher.h>
19#include <crypto/scatterwalk.h>
20#include <linux/err.h>
21#include <linux/init.h>
22#include <linux/kernel.h>
23#include <linux/mm.h>
24#include <linux/module.h>
25#include <linux/random.h>
26#include <linux/scatterlist.h>
27#include <linux/spinlock.h>
28#include <linux/string.h>
29
30struct eseqiv_request_ctx {
31 struct scatterlist src[2];
32 struct scatterlist dst[2];
33 char tail[];
34};
35
36struct eseqiv_ctx {
37 spinlock_t lock;
38 unsigned int reqoff;
39 char salt[];
40};
41
42static void eseqiv_complete2(struct skcipher_givcrypt_request *req)
43{
44 struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
45 struct eseqiv_request_ctx *reqctx = skcipher_givcrypt_reqctx(req);
46
47 memcpy(req->giv, PTR_ALIGN((u8 *)reqctx->tail,
48 crypto_ablkcipher_alignmask(geniv) + 1),
49 crypto_ablkcipher_ivsize(geniv));
50}
51
52static void eseqiv_complete(struct crypto_async_request *base, int err)
53{
54 struct skcipher_givcrypt_request *req = base->data;
55
56 if (err)
57 goto out;
58
59 eseqiv_complete2(req);
60
61out:
62 skcipher_givcrypt_complete(req, err);
63}
64
65static void eseqiv_chain(struct scatterlist *head, struct scatterlist *sg,
66 int chain)
67{
68 if (chain) {
69 head->length += sg->length;
70 sg = scatterwalk_sg_next(sg);
71 }
72
73 if (sg)
74 scatterwalk_sg_chain(head, 2, sg);
75 else
76 sg_mark_end(head);
77}
78
79static int eseqiv_givencrypt(struct skcipher_givcrypt_request *req)
80{
81 struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
82 struct eseqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
83 struct eseqiv_request_ctx *reqctx = skcipher_givcrypt_reqctx(req);
84 struct ablkcipher_request *subreq;
85 crypto_completion_t complete;
86 void *data;
87 struct scatterlist *osrc, *odst;
88 struct scatterlist *dst;
89 struct page *srcp;
90 struct page *dstp;
91 u8 *giv;
92 u8 *vsrc;
93 u8 *vdst;
94 __be64 seq;
95 unsigned int ivsize;
96 unsigned int len;
97 int err;
98
99 subreq = (void *)(reqctx->tail + ctx->reqoff);
100 ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv));
101
102 giv = req->giv;
103 complete = req->creq.base.complete;
104 data = req->creq.base.data;
105
106 osrc = req->creq.src;
107 odst = req->creq.dst;
108 srcp = sg_page(osrc);
109 dstp = sg_page(odst);
110 vsrc = PageHighMem(srcp) ? NULL : page_address(srcp) + osrc->offset;
111 vdst = PageHighMem(dstp) ? NULL : page_address(dstp) + odst->offset;
112
113 ivsize = crypto_ablkcipher_ivsize(geniv);
114
115 if (vsrc != giv + ivsize && vdst != giv + ivsize) {
116 giv = PTR_ALIGN((u8 *)reqctx->tail,
117 crypto_ablkcipher_alignmask(geniv) + 1);
118 complete = eseqiv_complete;
119 data = req;
120 }
121
122 ablkcipher_request_set_callback(subreq, req->creq.base.flags, complete,
123 data);
124
125 sg_init_table(reqctx->src, 2);
126 sg_set_buf(reqctx->src, giv, ivsize);
127 eseqiv_chain(reqctx->src, osrc, vsrc == giv + ivsize);
128
129 dst = reqctx->src;
130 if (osrc != odst) {
131 sg_init_table(reqctx->dst, 2);
132 sg_set_buf(reqctx->dst, giv, ivsize);
133 eseqiv_chain(reqctx->dst, odst, vdst == giv + ivsize);
134
135 dst = reqctx->dst;
136 }
137
138 ablkcipher_request_set_crypt(subreq, reqctx->src, dst,
139 req->creq.nbytes, req->creq.info);
140
141 memcpy(req->creq.info, ctx->salt, ivsize);
142
143 len = ivsize;
144 if (ivsize > sizeof(u64)) {
145 memset(req->giv, 0, ivsize - sizeof(u64));
146 len = sizeof(u64);
147 }
148 seq = cpu_to_be64(req->seq);
149 memcpy(req->giv + ivsize - len, &seq, len);
150
151 err = crypto_ablkcipher_encrypt(subreq);
152 if (err)
153 goto out;
154
155 eseqiv_complete2(req);
156
157out:
158 return err;
159}
160
161static int eseqiv_givencrypt_first(struct skcipher_givcrypt_request *req)
162{
163 struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
164 struct eseqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
165
166 spin_lock_bh(&ctx->lock);
167 if (crypto_ablkcipher_crt(geniv)->givencrypt != eseqiv_givencrypt_first)
168 goto unlock;
169
170 crypto_ablkcipher_crt(geniv)->givencrypt = eseqiv_givencrypt;
171 get_random_bytes(ctx->salt, crypto_ablkcipher_ivsize(geniv));
172
173unlock:
174 spin_unlock_bh(&ctx->lock);
175
176 return eseqiv_givencrypt(req);
177}
178
179static int eseqiv_init(struct crypto_tfm *tfm)
180{
181 struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm);
182 struct eseqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
183 unsigned long alignmask;
184 unsigned int reqsize;
185
186 spin_lock_init(&ctx->lock);
187
188 alignmask = crypto_tfm_ctx_alignment() - 1;
189 reqsize = sizeof(struct eseqiv_request_ctx);
190
191 if (alignmask & reqsize) {
192 alignmask &= reqsize;
193 alignmask--;
194 }
195
196 alignmask = ~alignmask;
197 alignmask &= crypto_ablkcipher_alignmask(geniv);
198
199 reqsize += alignmask;
200 reqsize += crypto_ablkcipher_ivsize(geniv);
201 reqsize = ALIGN(reqsize, crypto_tfm_ctx_alignment());
202
203 ctx->reqoff = reqsize - sizeof(struct eseqiv_request_ctx);
204
205 tfm->crt_ablkcipher.reqsize = reqsize +
206 sizeof(struct ablkcipher_request);
207
208 return skcipher_geniv_init(tfm);
209}
210
211static struct crypto_template eseqiv_tmpl;
212
213static struct crypto_instance *eseqiv_alloc(struct rtattr **tb)
214{
215 struct crypto_instance *inst;
216 int err;
217
218 inst = skcipher_geniv_alloc(&eseqiv_tmpl, tb, 0, 0);
219 if (IS_ERR(inst))
220 goto out;
221
222 err = -EINVAL;
223 if (inst->alg.cra_ablkcipher.ivsize != inst->alg.cra_blocksize)
224 goto free_inst;
225
226 inst->alg.cra_ablkcipher.givencrypt = eseqiv_givencrypt_first;
227
228 inst->alg.cra_init = eseqiv_init;
229 inst->alg.cra_exit = skcipher_geniv_exit;
230
231 inst->alg.cra_ctxsize = sizeof(struct eseqiv_ctx);
232 inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize;
233
234out:
235 return inst;
236
237free_inst:
238 skcipher_geniv_free(inst);
239 inst = ERR_PTR(err);
240 goto out;
241}
242
243static struct crypto_template eseqiv_tmpl = {
244 .name = "eseqiv",
245 .alloc = eseqiv_alloc,
246 .free = skcipher_geniv_free,
247 .module = THIS_MODULE,
248};
249
250static int __init eseqiv_module_init(void)
251{
252 return crypto_register_template(&eseqiv_tmpl);
253}
254
255static void __exit eseqiv_module_exit(void)
256{
257 crypto_unregister_template(&eseqiv_tmpl);
258}
259
260module_init(eseqiv_module_init);
261module_exit(eseqiv_module_exit);
262
263MODULE_LICENSE("GPL");
264MODULE_DESCRIPTION("Encrypted Sequence Number IV Generator");
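
As a reading aid, the seed that eseqiv hands to the underlying cipher can be summed up in a few lines: the per-tfm salt becomes the chaining IV of the encryption sub-request, and the generated-IV slot initially holds the zero-padded, big-endian sequence number; that block is encrypted in front of the payload, and the resulting ciphertext block is the IV that goes on the wire. The helper below is only a sketch of that packing step; the function name, the separate chain_iv buffer and the single-block IV assumption are illustrative, not part of the patch.

#include <linux/string.h>
#include <linux/types.h>
#include <asm/byteorder.h>

static void eseqiv_seed(u8 *giv, u8 *chain_iv, const u8 *salt,
			unsigned int ivsize, u64 seq)
{
	unsigned int len = ivsize;
	__be64 beseq;

	/* The salt seeds the chaining IV of the encryption sub-request. */
	memcpy(chain_iv, salt, ivsize);

	/* The IV slot starts as the zero-padded big-endian sequence number;
	 * encrypting it ahead of the payload yields the visible IV. */
	if (ivsize > sizeof(u64)) {
		memset(giv, 0, ivsize - sizeof(u64));
		len = sizeof(u64);
	}
	beseq = cpu_to_be64(seq);
	memcpy(giv + ivsize - len, &beseq, len);
}
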
diff --git a/crypto/gcm.c b/crypto/gcm.c
new file mode 100644
index 000000000000..e70afd0c73dd
--- /dev/null
+++ b/crypto/gcm.c
@@ -0,0 +1,823 @@
1/*
2 * GCM: Galois/Counter Mode.
3 *
4 * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License version 2 as published
8 * by the Free Software Foundation.
9 */
10
11#include <crypto/gf128mul.h>
12#include <crypto/internal/aead.h>
13#include <crypto/internal/skcipher.h>
14#include <crypto/scatterwalk.h>
15#include <linux/completion.h>
16#include <linux/err.h>
17#include <linux/init.h>
18#include <linux/kernel.h>
19#include <linux/module.h>
20#include <linux/slab.h>
21
22struct gcm_instance_ctx {
23 struct crypto_skcipher_spawn ctr;
24};
25
26struct crypto_gcm_ctx {
27 struct crypto_ablkcipher *ctr;
28 struct gf128mul_4k *gf128;
29};
30
31struct crypto_rfc4106_ctx {
32 struct crypto_aead *child;
33 u8 nonce[4];
34};
35
36struct crypto_gcm_ghash_ctx {
37 u32 bytes;
38 u32 flags;
39 struct gf128mul_4k *gf128;
40 u8 buffer[16];
41};
42
43struct crypto_gcm_req_priv_ctx {
44 u8 auth_tag[16];
45 u8 iauth_tag[16];
46 struct scatterlist src[2];
47 struct scatterlist dst[2];
48 struct crypto_gcm_ghash_ctx ghash;
49 struct ablkcipher_request abreq;
50};
51
52struct crypto_gcm_setkey_result {
53 int err;
54 struct completion completion;
55};
56
57static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
58 struct aead_request *req)
59{
60 unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));
61
62 return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
63}
64
65static void crypto_gcm_ghash_init(struct crypto_gcm_ghash_ctx *ctx, u32 flags,
66 struct gf128mul_4k *gf128)
67{
68 ctx->bytes = 0;
69 ctx->flags = flags;
70 ctx->gf128 = gf128;
71 memset(ctx->buffer, 0, 16);
72}
73
74static void crypto_gcm_ghash_update(struct crypto_gcm_ghash_ctx *ctx,
75 const u8 *src, unsigned int srclen)
76{
77 u8 *dst = ctx->buffer;
78
79 if (ctx->bytes) {
80 int n = min(srclen, ctx->bytes);
81 u8 *pos = dst + (16 - ctx->bytes);
82
83 ctx->bytes -= n;
84 srclen -= n;
85
86 while (n--)
87 *pos++ ^= *src++;
88
89 if (!ctx->bytes)
90 gf128mul_4k_lle((be128 *)dst, ctx->gf128);
91 }
92
93 while (srclen >= 16) {
94 crypto_xor(dst, src, 16);
95 gf128mul_4k_lle((be128 *)dst, ctx->gf128);
96 src += 16;
97 srclen -= 16;
98 }
99
100 if (srclen) {
101 ctx->bytes = 16 - srclen;
102 while (srclen--)
103 *dst++ ^= *src++;
104 }
105}
106
107static void crypto_gcm_ghash_update_sg(struct crypto_gcm_ghash_ctx *ctx,
108 struct scatterlist *sg, int len)
109{
110 struct scatter_walk walk;
111 u8 *src;
112 int n;
113
114 if (!len)
115 return;
116
117 scatterwalk_start(&walk, sg);
118
119 while (len) {
120 n = scatterwalk_clamp(&walk, len);
121
122 if (!n) {
123 scatterwalk_start(&walk, scatterwalk_sg_next(walk.sg));
124 n = scatterwalk_clamp(&walk, len);
125 }
126
127 src = scatterwalk_map(&walk, 0);
128
129 crypto_gcm_ghash_update(ctx, src, n);
130 len -= n;
131
132 scatterwalk_unmap(src, 0);
133 scatterwalk_advance(&walk, n);
134 scatterwalk_done(&walk, 0, len);
135 if (len)
136 crypto_yield(ctx->flags);
137 }
138}
139
140static void crypto_gcm_ghash_flush(struct crypto_gcm_ghash_ctx *ctx)
141{
142 u8 *dst = ctx->buffer;
143
144 if (ctx->bytes) {
145 u8 *tmp = dst + (16 - ctx->bytes);
146
147 while (ctx->bytes--)
148 *tmp++ ^= 0;
149
150 gf128mul_4k_lle((be128 *)dst, ctx->gf128);
151 }
152
153 ctx->bytes = 0;
154}
155
156static void crypto_gcm_ghash_final_xor(struct crypto_gcm_ghash_ctx *ctx,
157 unsigned int authlen,
158 unsigned int cryptlen, u8 *dst)
159{
160 u8 *buf = ctx->buffer;
161 u128 lengths;
162
163 lengths.a = cpu_to_be64(authlen * 8);
164 lengths.b = cpu_to_be64(cryptlen * 8);
165
166 crypto_gcm_ghash_flush(ctx);
167 crypto_xor(buf, (u8 *)&lengths, 16);
168 gf128mul_4k_lle((be128 *)buf, ctx->gf128);
169 crypto_xor(dst, buf, 16);
170}
171
172static void crypto_gcm_setkey_done(struct crypto_async_request *req, int err)
173{
174 struct crypto_gcm_setkey_result *result = req->data;
175
176 if (err == -EINPROGRESS)
177 return;
178
179 result->err = err;
180 complete(&result->completion);
181}
182
183static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
184 unsigned int keylen)
185{
186 struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
187 struct crypto_ablkcipher *ctr = ctx->ctr;
188 struct {
189 be128 hash;
190 u8 iv[8];
191
192 struct crypto_gcm_setkey_result result;
193
194 struct scatterlist sg[1];
195 struct ablkcipher_request req;
196 } *data;
197 int err;
198
199 crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
200 crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
201 CRYPTO_TFM_REQ_MASK);
202
203 err = crypto_ablkcipher_setkey(ctr, key, keylen);
204 if (err)
205 return err;
206
207 crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
208 CRYPTO_TFM_RES_MASK);
209
210 data = kzalloc(sizeof(*data) + crypto_ablkcipher_reqsize(ctr),
211 GFP_KERNEL);
212 if (!data)
213 return -ENOMEM;
214
215 init_completion(&data->result.completion);
216 sg_init_one(data->sg, &data->hash, sizeof(data->hash));
217 ablkcipher_request_set_tfm(&data->req, ctr);
218 ablkcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
219 CRYPTO_TFM_REQ_MAY_BACKLOG,
220 crypto_gcm_setkey_done,
221 &data->result);
222 ablkcipher_request_set_crypt(&data->req, data->sg, data->sg,
223 sizeof(data->hash), data->iv);
224
225 err = crypto_ablkcipher_encrypt(&data->req);
226 if (err == -EINPROGRESS || err == -EBUSY) {
227 err = wait_for_completion_interruptible(
228 &data->result.completion);
229 if (!err)
230 err = data->result.err;
231 }
232
233 if (err)
234 goto out;
235
236 if (ctx->gf128 != NULL)
237 gf128mul_free_4k(ctx->gf128);
238
239 ctx->gf128 = gf128mul_init_4k_lle(&data->hash);
240
241 if (ctx->gf128 == NULL)
242 err = -ENOMEM;
243
244out:
245 kfree(data);
246 return err;
247}
248
249static int crypto_gcm_setauthsize(struct crypto_aead *tfm,
250 unsigned int authsize)
251{
252 switch (authsize) {
253 case 4:
254 case 8:
255 case 12:
256 case 13:
257 case 14:
258 case 15:
259 case 16:
260 break;
261 default:
262 return -EINVAL;
263 }
264
265 return 0;
266}
267
268static void crypto_gcm_init_crypt(struct ablkcipher_request *ablk_req,
269 struct aead_request *req,
270 unsigned int cryptlen)
271{
272 struct crypto_aead *aead = crypto_aead_reqtfm(req);
273 struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
274 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
275 u32 flags = req->base.tfm->crt_flags;
276 struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
277 struct scatterlist *dst;
278 __be32 counter = cpu_to_be32(1);
279
280 memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag));
281 memcpy(req->iv + 12, &counter, 4);
282
283 sg_init_table(pctx->src, 2);
284 sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
285 scatterwalk_sg_chain(pctx->src, 2, req->src);
286
287 dst = pctx->src;
288 if (req->src != req->dst) {
289 sg_init_table(pctx->dst, 2);
290 sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag));
291 scatterwalk_sg_chain(pctx->dst, 2, req->dst);
292 dst = pctx->dst;
293 }
294
295 ablkcipher_request_set_tfm(ablk_req, ctx->ctr);
296 ablkcipher_request_set_crypt(ablk_req, pctx->src, dst,
297 cryptlen + sizeof(pctx->auth_tag),
298 req->iv);
299
300 crypto_gcm_ghash_init(ghash, flags, ctx->gf128);
301
302 crypto_gcm_ghash_update_sg(ghash, req->assoc, req->assoclen);
303 crypto_gcm_ghash_flush(ghash);
304}
305
306static int crypto_gcm_hash(struct aead_request *req)
307{
308 struct crypto_aead *aead = crypto_aead_reqtfm(req);
309 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
310 u8 *auth_tag = pctx->auth_tag;
311 struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
312
313 crypto_gcm_ghash_update_sg(ghash, req->dst, req->cryptlen);
314 crypto_gcm_ghash_final_xor(ghash, req->assoclen, req->cryptlen,
315 auth_tag);
316
317 scatterwalk_map_and_copy(auth_tag, req->dst, req->cryptlen,
318 crypto_aead_authsize(aead), 1);
319 return 0;
320}
321
322static void crypto_gcm_encrypt_done(struct crypto_async_request *areq, int err)
323{
324 struct aead_request *req = areq->data;
325
326 if (!err)
327 err = crypto_gcm_hash(req);
328
329 aead_request_complete(req, err);
330}
331
332static int crypto_gcm_encrypt(struct aead_request *req)
333{
334 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
335 struct ablkcipher_request *abreq = &pctx->abreq;
336 int err;
337
338 crypto_gcm_init_crypt(abreq, req, req->cryptlen);
339 ablkcipher_request_set_callback(abreq, aead_request_flags(req),
340 crypto_gcm_encrypt_done, req);
341
342 err = crypto_ablkcipher_encrypt(abreq);
343 if (err)
344 return err;
345
346 return crypto_gcm_hash(req);
347}
348
349static int crypto_gcm_verify(struct aead_request *req)
350{
351 struct crypto_aead *aead = crypto_aead_reqtfm(req);
352 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
353 struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
354 u8 *auth_tag = pctx->auth_tag;
355 u8 *iauth_tag = pctx->iauth_tag;
356 unsigned int authsize = crypto_aead_authsize(aead);
357 unsigned int cryptlen = req->cryptlen - authsize;
358
359 crypto_gcm_ghash_final_xor(ghash, req->assoclen, cryptlen, auth_tag);
360
361 authsize = crypto_aead_authsize(aead);
362 scatterwalk_map_and_copy(iauth_tag, req->src, cryptlen, authsize, 0);
363 return memcmp(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
364}
365
366static void crypto_gcm_decrypt_done(struct crypto_async_request *areq, int err)
367{
368 struct aead_request *req = areq->data;
369
370 if (!err)
371 err = crypto_gcm_verify(req);
372
373 aead_request_complete(req, err);
374}
375
376static int crypto_gcm_decrypt(struct aead_request *req)
377{
378 struct crypto_aead *aead = crypto_aead_reqtfm(req);
379 struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
380 struct ablkcipher_request *abreq = &pctx->abreq;
381 struct crypto_gcm_ghash_ctx *ghash = &pctx->ghash;
382 unsigned int cryptlen = req->cryptlen;
383 unsigned int authsize = crypto_aead_authsize(aead);
384 int err;
385
386 if (cryptlen < authsize)
387 return -EINVAL;
388 cryptlen -= authsize;
389
390 crypto_gcm_init_crypt(abreq, req, cryptlen);
391 ablkcipher_request_set_callback(abreq, aead_request_flags(req),
392 crypto_gcm_decrypt_done, req);
393
394 crypto_gcm_ghash_update_sg(ghash, req->src, cryptlen);
395
396 err = crypto_ablkcipher_decrypt(abreq);
397 if (err)
398 return err;
399
400 return crypto_gcm_verify(req);
401}
402
403static int crypto_gcm_init_tfm(struct crypto_tfm *tfm)
404{
405 struct crypto_instance *inst = (void *)tfm->__crt_alg;
406 struct gcm_instance_ctx *ictx = crypto_instance_ctx(inst);
407 struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
408 struct crypto_ablkcipher *ctr;
409 unsigned long align;
410 int err;
411
412 ctr = crypto_spawn_skcipher(&ictx->ctr);
413 err = PTR_ERR(ctr);
414 if (IS_ERR(ctr))
415 return err;
416
417 ctx->ctr = ctr;
418 ctx->gf128 = NULL;
419
420 align = crypto_tfm_alg_alignmask(tfm);
421 align &= ~(crypto_tfm_ctx_alignment() - 1);
422 tfm->crt_aead.reqsize = align +
423 sizeof(struct crypto_gcm_req_priv_ctx) +
424 crypto_ablkcipher_reqsize(ctr);
425
426 return 0;
427}
428
429static void crypto_gcm_exit_tfm(struct crypto_tfm *tfm)
430{
431 struct crypto_gcm_ctx *ctx = crypto_tfm_ctx(tfm);
432
433 if (ctx->gf128 != NULL)
434 gf128mul_free_4k(ctx->gf128);
435
436 crypto_free_ablkcipher(ctx->ctr);
437}
438
439static struct crypto_instance *crypto_gcm_alloc_common(struct rtattr **tb,
440 const char *full_name,
441 const char *ctr_name)
442{
443 struct crypto_attr_type *algt;
444 struct crypto_instance *inst;
445 struct crypto_alg *ctr;
446 struct gcm_instance_ctx *ctx;
447 int err;
448
449 algt = crypto_get_attr_type(tb);
450 err = PTR_ERR(algt);
451 if (IS_ERR(algt))
452 return ERR_PTR(err);
453
454 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
455 return ERR_PTR(-EINVAL);
456
457 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
458 if (!inst)
459 return ERR_PTR(-ENOMEM);
460
461 ctx = crypto_instance_ctx(inst);
462 crypto_set_skcipher_spawn(&ctx->ctr, inst);
463 err = crypto_grab_skcipher(&ctx->ctr, ctr_name, 0,
464 crypto_requires_sync(algt->type,
465 algt->mask));
466 if (err)
467 goto err_free_inst;
468
469 ctr = crypto_skcipher_spawn_alg(&ctx->ctr);
470
471	/* The underlying ctr mode must supply a 16-byte IV. */
472	err = -EINVAL;
473	if (ctr->cra_ablkcipher.ivsize != 16)
474		goto out_put_ctr;
475
476	/* Not a stream cipher? */
477	if (ctr->cra_blocksize != 1)
478		goto out_put_ctr;
479
480 err = -ENAMETOOLONG;
481 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
482 "gcm_base(%s)", ctr->cra_driver_name) >=
483 CRYPTO_MAX_ALG_NAME)
484 goto out_put_ctr;
485
486 memcpy(inst->alg.cra_name, full_name, CRYPTO_MAX_ALG_NAME);
487
488 inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
489 inst->alg.cra_flags |= ctr->cra_flags & CRYPTO_ALG_ASYNC;
490 inst->alg.cra_priority = ctr->cra_priority;
491 inst->alg.cra_blocksize = 1;
492 inst->alg.cra_alignmask = ctr->cra_alignmask | (__alignof__(u64) - 1);
493 inst->alg.cra_type = &crypto_aead_type;
494 inst->alg.cra_aead.ivsize = 16;
495 inst->alg.cra_aead.maxauthsize = 16;
496 inst->alg.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
497 inst->alg.cra_init = crypto_gcm_init_tfm;
498 inst->alg.cra_exit = crypto_gcm_exit_tfm;
499 inst->alg.cra_aead.setkey = crypto_gcm_setkey;
500 inst->alg.cra_aead.setauthsize = crypto_gcm_setauthsize;
501 inst->alg.cra_aead.encrypt = crypto_gcm_encrypt;
502 inst->alg.cra_aead.decrypt = crypto_gcm_decrypt;
503
504out:
505 return inst;
506
507out_put_ctr:
508 crypto_drop_skcipher(&ctx->ctr);
509err_free_inst:
510 kfree(inst);
511 inst = ERR_PTR(err);
512 goto out;
513}
514
515static struct crypto_instance *crypto_gcm_alloc(struct rtattr **tb)
516{
517 int err;
518 const char *cipher_name;
519 char ctr_name[CRYPTO_MAX_ALG_NAME];
520 char full_name[CRYPTO_MAX_ALG_NAME];
521
522 cipher_name = crypto_attr_alg_name(tb[1]);
523 err = PTR_ERR(cipher_name);
524 if (IS_ERR(cipher_name))
525 return ERR_PTR(err);
526
527 if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", cipher_name) >=
528 CRYPTO_MAX_ALG_NAME)
529 return ERR_PTR(-ENAMETOOLONG);
530
531 if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm(%s)", cipher_name) >=
532 CRYPTO_MAX_ALG_NAME)
533 return ERR_PTR(-ENAMETOOLONG);
534
535 return crypto_gcm_alloc_common(tb, full_name, ctr_name);
536}
537
538static void crypto_gcm_free(struct crypto_instance *inst)
539{
540 struct gcm_instance_ctx *ctx = crypto_instance_ctx(inst);
541
542 crypto_drop_skcipher(&ctx->ctr);
543 kfree(inst);
544}
545
546static struct crypto_template crypto_gcm_tmpl = {
547 .name = "gcm",
548 .alloc = crypto_gcm_alloc,
549 .free = crypto_gcm_free,
550 .module = THIS_MODULE,
551};
552
553static struct crypto_instance *crypto_gcm_base_alloc(struct rtattr **tb)
554{
555 int err;
556 const char *ctr_name;
557 char full_name[CRYPTO_MAX_ALG_NAME];
558
559 ctr_name = crypto_attr_alg_name(tb[1]);
560 err = PTR_ERR(ctr_name);
561 if (IS_ERR(ctr_name))
562 return ERR_PTR(err);
563
564 if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "gcm_base(%s)",
565 ctr_name) >= CRYPTO_MAX_ALG_NAME)
566 return ERR_PTR(-ENAMETOOLONG);
567
568 return crypto_gcm_alloc_common(tb, full_name, ctr_name);
569}
570
571static struct crypto_template crypto_gcm_base_tmpl = {
572 .name = "gcm_base",
573 .alloc = crypto_gcm_base_alloc,
574 .free = crypto_gcm_free,
575 .module = THIS_MODULE,
576};
577
578static int crypto_rfc4106_setkey(struct crypto_aead *parent, const u8 *key,
579 unsigned int keylen)
580{
581 struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
582 struct crypto_aead *child = ctx->child;
583 int err;
584
585 if (keylen < 4)
586 return -EINVAL;
587
588 keylen -= 4;
589 memcpy(ctx->nonce, key + keylen, 4);
590
591 crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
592 crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
593 CRYPTO_TFM_REQ_MASK);
594 err = crypto_aead_setkey(child, key, keylen);
595 crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
596 CRYPTO_TFM_RES_MASK);
597
598 return err;
599}
600
601static int crypto_rfc4106_setauthsize(struct crypto_aead *parent,
602 unsigned int authsize)
603{
604 struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
605
606 switch (authsize) {
607 case 8:
608 case 12:
609 case 16:
610 break;
611 default:
612 return -EINVAL;
613 }
614
615 return crypto_aead_setauthsize(ctx->child, authsize);
616}
617
618static struct aead_request *crypto_rfc4106_crypt(struct aead_request *req)
619{
620 struct aead_request *subreq = aead_request_ctx(req);
621 struct crypto_aead *aead = crypto_aead_reqtfm(req);
622 struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(aead);
623 struct crypto_aead *child = ctx->child;
624 u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
625 crypto_aead_alignmask(child) + 1);
626
627 memcpy(iv, ctx->nonce, 4);
628 memcpy(iv + 4, req->iv, 8);
629
630 aead_request_set_tfm(subreq, child);
631 aead_request_set_callback(subreq, req->base.flags, req->base.complete,
632 req->base.data);
633 aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, iv);
634 aead_request_set_assoc(subreq, req->assoc, req->assoclen);
635
636 return subreq;
637}
638
639static int crypto_rfc4106_encrypt(struct aead_request *req)
640{
641 req = crypto_rfc4106_crypt(req);
642
643 return crypto_aead_encrypt(req);
644}
645
646static int crypto_rfc4106_decrypt(struct aead_request *req)
647{
648 req = crypto_rfc4106_crypt(req);
649
650 return crypto_aead_decrypt(req);
651}
652
653static int crypto_rfc4106_init_tfm(struct crypto_tfm *tfm)
654{
655 struct crypto_instance *inst = (void *)tfm->__crt_alg;
656 struct crypto_aead_spawn *spawn = crypto_instance_ctx(inst);
657 struct crypto_rfc4106_ctx *ctx = crypto_tfm_ctx(tfm);
658 struct crypto_aead *aead;
659 unsigned long align;
660
661 aead = crypto_spawn_aead(spawn);
662 if (IS_ERR(aead))
663 return PTR_ERR(aead);
664
665 ctx->child = aead;
666
667 align = crypto_aead_alignmask(aead);
668 align &= ~(crypto_tfm_ctx_alignment() - 1);
669 tfm->crt_aead.reqsize = sizeof(struct aead_request) +
670 ALIGN(crypto_aead_reqsize(aead),
671 crypto_tfm_ctx_alignment()) +
672 align + 16;
673
674 return 0;
675}
676
677static void crypto_rfc4106_exit_tfm(struct crypto_tfm *tfm)
678{
679 struct crypto_rfc4106_ctx *ctx = crypto_tfm_ctx(tfm);
680
681 crypto_free_aead(ctx->child);
682}
683
684static struct crypto_instance *crypto_rfc4106_alloc(struct rtattr **tb)
685{
686 struct crypto_attr_type *algt;
687 struct crypto_instance *inst;
688 struct crypto_aead_spawn *spawn;
689 struct crypto_alg *alg;
690 const char *ccm_name;
691 int err;
692
693 algt = crypto_get_attr_type(tb);
694 err = PTR_ERR(algt);
695 if (IS_ERR(algt))
696 return ERR_PTR(err);
697
698 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
699 return ERR_PTR(-EINVAL);
700
701 ccm_name = crypto_attr_alg_name(tb[1]);
702 err = PTR_ERR(ccm_name);
703 if (IS_ERR(ccm_name))
704 return ERR_PTR(err);
705
706 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
707 if (!inst)
708 return ERR_PTR(-ENOMEM);
709
710 spawn = crypto_instance_ctx(inst);
711 crypto_set_aead_spawn(spawn, inst);
712 err = crypto_grab_aead(spawn, ccm_name, 0,
713 crypto_requires_sync(algt->type, algt->mask));
714 if (err)
715 goto out_free_inst;
716
717 alg = crypto_aead_spawn_alg(spawn);
718
719 err = -EINVAL;
720
721	/* The underlying GCM algorithm must use a 16-byte IV. */
722 if (alg->cra_aead.ivsize != 16)
723 goto out_drop_alg;
724
725 /* Not a stream cipher? */
726 if (alg->cra_blocksize != 1)
727 goto out_drop_alg;
728
729 err = -ENAMETOOLONG;
730 if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
731 "rfc4106(%s)", alg->cra_name) >= CRYPTO_MAX_ALG_NAME ||
732 snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
733 "rfc4106(%s)", alg->cra_driver_name) >=
734 CRYPTO_MAX_ALG_NAME)
735 goto out_drop_alg;
736
737 inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD;
738 inst->alg.cra_flags |= alg->cra_flags & CRYPTO_ALG_ASYNC;
739 inst->alg.cra_priority = alg->cra_priority;
740 inst->alg.cra_blocksize = 1;
741 inst->alg.cra_alignmask = alg->cra_alignmask;
742 inst->alg.cra_type = &crypto_nivaead_type;
743
744 inst->alg.cra_aead.ivsize = 8;
745 inst->alg.cra_aead.maxauthsize = 16;
746
747 inst->alg.cra_ctxsize = sizeof(struct crypto_rfc4106_ctx);
748
749 inst->alg.cra_init = crypto_rfc4106_init_tfm;
750 inst->alg.cra_exit = crypto_rfc4106_exit_tfm;
751
752 inst->alg.cra_aead.setkey = crypto_rfc4106_setkey;
753 inst->alg.cra_aead.setauthsize = crypto_rfc4106_setauthsize;
754 inst->alg.cra_aead.encrypt = crypto_rfc4106_encrypt;
755 inst->alg.cra_aead.decrypt = crypto_rfc4106_decrypt;
756
757 inst->alg.cra_aead.geniv = "seqiv";
758
759out:
760 return inst;
761
762out_drop_alg:
763 crypto_drop_aead(spawn);
764out_free_inst:
765 kfree(inst);
766 inst = ERR_PTR(err);
767 goto out;
768}
769
770static void crypto_rfc4106_free(struct crypto_instance *inst)
771{
772 crypto_drop_spawn(crypto_instance_ctx(inst));
773 kfree(inst);
774}
775
776static struct crypto_template crypto_rfc4106_tmpl = {
777 .name = "rfc4106",
778 .alloc = crypto_rfc4106_alloc,
779 .free = crypto_rfc4106_free,
780 .module = THIS_MODULE,
781};
782
783static int __init crypto_gcm_module_init(void)
784{
785 int err;
786
787 err = crypto_register_template(&crypto_gcm_base_tmpl);
788 if (err)
789 goto out;
790
791 err = crypto_register_template(&crypto_gcm_tmpl);
792 if (err)
793 goto out_undo_base;
794
795 err = crypto_register_template(&crypto_rfc4106_tmpl);
796 if (err)
797 goto out_undo_gcm;
798
799out:
800 return err;
801
802out_undo_gcm:
803 crypto_unregister_template(&crypto_gcm_tmpl);
804out_undo_base:
805 crypto_unregister_template(&crypto_gcm_base_tmpl);
806 goto out;
807}
808
809static void __exit crypto_gcm_module_exit(void)
810{
811 crypto_unregister_template(&crypto_rfc4106_tmpl);
812 crypto_unregister_template(&crypto_gcm_tmpl);
813 crypto_unregister_template(&crypto_gcm_base_tmpl);
814}
815
816module_init(crypto_gcm_module_init);
817module_exit(crypto_gcm_module_exit);
818
819MODULE_LICENSE("GPL");
820MODULE_DESCRIPTION("Galois/Counter Mode");
821MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>");
822MODULE_ALIAS("gcm_base");
823MODULE_ALIAS("rfc4106");
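
The template above turns any "ctr(cipher)" with a 16-byte IV into "gcm(cipher)", while rfc4106() wraps the result for IPsec: the last four key bytes become the fixed nonce and the caller supplies an 8-byte per-request IV, generated by seqiv. Below is a minimal sketch of how a kernel caller might drive a plain gcm(aes) instance through the AEAD request API; the function name, key material, buffer sizes and the assumption of synchronous completion are illustrative only. Note that the 16-byte IV buffer is written to (its last four bytes are replaced by the block counter) and that the destination must leave room for the authentication tag.

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static int gcm_encrypt_example(void)
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	struct scatterlist sg[1], asg[1];
	u8 key[16] = { 0 };		/* AES-128 key (illustrative) */
	u8 iv[16] = { 0 };		/* bytes 12..15 are overwritten */
	u8 assoc[8] = { 0 };		/* authenticated-only data */
	u8 buf[64 + 16] = { 0 };	/* 64 bytes of text + 16-byte tag */
	int err;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_aead_setkey(tfm, key, sizeof(key));
	if (err)
		goto out;
	err = crypto_aead_setauthsize(tfm, 16);
	if (err)
		goto out;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out;
	}

	sg_init_one(sg, buf, sizeof(buf));
	sg_init_one(asg, assoc, sizeof(assoc));

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP, NULL, NULL);
	aead_request_set_crypt(req, sg, sg, 64, iv);	/* in place */
	aead_request_set_assoc(req, asg, sizeof(assoc));

	/* With the generic (synchronous) ctr(aes) this returns 0 directly;
	 * an async backend would need a real completion callback. */
	err = crypto_aead_encrypt(req);

	aead_request_free(req);
out:
	crypto_free_aead(tfm);
	return err;
}
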
diff --git a/crypto/hmac.c b/crypto/hmac.c
index 0f05be769c34..a1d016a50e7d 100644
--- a/crypto/hmac.c
+++ b/crypto/hmac.c
@@ -17,6 +17,7 @@
17 */ 17 */
18 18
19#include <crypto/algapi.h> 19#include <crypto/algapi.h>
20#include <crypto/scatterwalk.h>
20#include <linux/err.h> 21#include <linux/err.h>
21#include <linux/init.h> 22#include <linux/init.h>
22#include <linux/kernel.h> 23#include <linux/kernel.h>
@@ -160,7 +161,7 @@ static int hmac_digest(struct hash_desc *pdesc, struct scatterlist *sg,
160 161
161 sg_init_table(sg1, 2); 162 sg_init_table(sg1, 2);
162 sg_set_buf(sg1, ipad, bs); 163 sg_set_buf(sg1, ipad, bs);
163 sg_set_page(&sg1[1], (void *) sg, 0, 0); 164 scatterwalk_sg_chain(sg1, 2, sg);
164 165
165 sg_init_table(sg2, 1); 166 sg_init_table(sg2, 1);
166 sg_set_buf(sg2, opad, bs + ds); 167 sg_set_buf(sg2, opad, bs + ds);
diff --git a/crypto/internal.h b/crypto/internal.h
index abb01f71f817..32f4c2145603 100644
--- a/crypto/internal.h
+++ b/crypto/internal.h
@@ -25,7 +25,6 @@
25#include <linux/notifier.h> 25#include <linux/notifier.h>
26#include <linux/rwsem.h> 26#include <linux/rwsem.h>
27#include <linux/slab.h> 27#include <linux/slab.h>
28#include <asm/kmap_types.h>
29 28
30/* Crypto notification events. */ 29/* Crypto notification events. */
31enum { 30enum {
@@ -50,34 +49,6 @@ extern struct list_head crypto_alg_list;
50extern struct rw_semaphore crypto_alg_sem; 49extern struct rw_semaphore crypto_alg_sem;
51extern struct blocking_notifier_head crypto_chain; 50extern struct blocking_notifier_head crypto_chain;
52 51
53static inline enum km_type crypto_kmap_type(int out)
54{
55 enum km_type type;
56
57 if (in_softirq())
58 type = out * (KM_SOFTIRQ1 - KM_SOFTIRQ0) + KM_SOFTIRQ0;
59 else
60 type = out * (KM_USER1 - KM_USER0) + KM_USER0;
61
62 return type;
63}
64
65static inline void *crypto_kmap(struct page *page, int out)
66{
67 return kmap_atomic(page, crypto_kmap_type(out));
68}
69
70static inline void crypto_kunmap(void *vaddr, int out)
71{
72 kunmap_atomic(vaddr, crypto_kmap_type(out));
73}
74
75static inline void crypto_yield(u32 flags)
76{
77 if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
78 cond_resched();
79}
80
81#ifdef CONFIG_PROC_FS 52#ifdef CONFIG_PROC_FS
82void __init crypto_init_proc(void); 53void __init crypto_init_proc(void);
83void __exit crypto_exit_proc(void); 54void __exit crypto_exit_proc(void);
@@ -122,6 +93,8 @@ void crypto_exit_digest_ops(struct crypto_tfm *tfm);
122void crypto_exit_cipher_ops(struct crypto_tfm *tfm); 93void crypto_exit_cipher_ops(struct crypto_tfm *tfm);
123void crypto_exit_compress_ops(struct crypto_tfm *tfm); 94void crypto_exit_compress_ops(struct crypto_tfm *tfm);
124 95
96void crypto_larval_kill(struct crypto_alg *alg);
97struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask);
125void crypto_larval_error(const char *name, u32 type, u32 mask); 98void crypto_larval_error(const char *name, u32 type, u32 mask);
126 99
127void crypto_shoot_alg(struct crypto_alg *alg); 100void crypto_shoot_alg(struct crypto_alg *alg);
diff --git a/crypto/lzo.c b/crypto/lzo.c
new file mode 100644
index 000000000000..48c32883f024
--- /dev/null
+++ b/crypto/lzo.c
@@ -0,0 +1,106 @@
1/*
2 * Cryptographic API.
3 *
4 * This program is free software; you can redistribute it and/or modify it
5 * under the terms of the GNU General Public License version 2 as published by
6 * the Free Software Foundation.
7 *
8 * This program is distributed in the hope that it will be useful, but WITHOUT
9 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
10 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
11 * more details.
12 *
13 * You should have received a copy of the GNU General Public License along with
14 * this program; if not, write to the Free Software Foundation, Inc., 51
15 * Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
16 *
17 */
18
19#include <linux/init.h>
20#include <linux/module.h>
21#include <linux/crypto.h>
22#include <linux/vmalloc.h>
23#include <linux/lzo.h>
24
25struct lzo_ctx {
26 void *lzo_comp_mem;
27};
28
29static int lzo_init(struct crypto_tfm *tfm)
30{
31 struct lzo_ctx *ctx = crypto_tfm_ctx(tfm);
32
33 ctx->lzo_comp_mem = vmalloc(LZO1X_MEM_COMPRESS);
34 if (!ctx->lzo_comp_mem)
35 return -ENOMEM;
36
37 return 0;
38}
39
40static void lzo_exit(struct crypto_tfm *tfm)
41{
42 struct lzo_ctx *ctx = crypto_tfm_ctx(tfm);
43
44 vfree(ctx->lzo_comp_mem);
45}
46
47static int lzo_compress(struct crypto_tfm *tfm, const u8 *src,
48 unsigned int slen, u8 *dst, unsigned int *dlen)
49{
50 struct lzo_ctx *ctx = crypto_tfm_ctx(tfm);
51 size_t tmp_len = *dlen; /* size_t(ulong) <-> uint on 64 bit */
52 int err;
53
54 err = lzo1x_1_compress(src, slen, dst, &tmp_len, ctx->lzo_comp_mem);
55
56 if (err != LZO_E_OK)
57 return -EINVAL;
58
59 *dlen = tmp_len;
60 return 0;
61}
62
63static int lzo_decompress(struct crypto_tfm *tfm, const u8 *src,
64 unsigned int slen, u8 *dst, unsigned int *dlen)
65{
66 int err;
67 size_t tmp_len = *dlen; /* size_t(ulong) <-> uint on 64 bit */
68
69 err = lzo1x_decompress_safe(src, slen, dst, &tmp_len);
70
71 if (err != LZO_E_OK)
72 return -EINVAL;
73
74 *dlen = tmp_len;
75 return 0;
76
77}
78
79static struct crypto_alg alg = {
80 .cra_name = "lzo",
81 .cra_flags = CRYPTO_ALG_TYPE_COMPRESS,
82 .cra_ctxsize = sizeof(struct lzo_ctx),
83 .cra_module = THIS_MODULE,
84 .cra_list = LIST_HEAD_INIT(alg.cra_list),
85 .cra_init = lzo_init,
86 .cra_exit = lzo_exit,
87 .cra_u = { .compress = {
88 .coa_compress = lzo_compress,
89 .coa_decompress = lzo_decompress } }
90};
91
92static int __init init(void)
93{
94 return crypto_register_alg(&alg);
95}
96
97static void __exit fini(void)
98{
99 crypto_unregister_alg(&alg);
100}
101
102module_init(init);
103module_exit(fini);
104
105MODULE_LICENSE("GPL");
106MODULE_DESCRIPTION("LZO Compression Algorithm");
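
Callers reach this wrapper through the generic compression interface, where *dlen is an in/out parameter: it carries the destination capacity on entry and the produced length on return, which is what the size_t/uint conversion comments above are about. The snippet below is a hedged usage sketch (the function name is illustrative); note that LZO1X can expand incompressible input, so the destination should be sized for the worst case of roughly slen + slen/16 + 67 bytes.

#include <linux/crypto.h>
#include <linux/err.h>

static int lzo_compress_example(const u8 *src, unsigned int slen,
				u8 *dst, unsigned int dst_capacity)
{
	struct crypto_comp *tfm;
	unsigned int dlen = dst_capacity;	/* in: capacity, out: size */
	int err;

	tfm = crypto_alloc_comp("lzo", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_comp_compress(tfm, src, slen, dst, &dlen);
	/* on success dlen now holds the compressed length */

	crypto_free_comp(tfm);
	return err;
}
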
diff --git a/crypto/pcbc.c b/crypto/pcbc.c
index c3ed8a1c9f46..fe704775f88f 100644
--- a/crypto/pcbc.c
+++ b/crypto/pcbc.c
@@ -24,7 +24,6 @@
24 24
25struct crypto_pcbc_ctx { 25struct crypto_pcbc_ctx {
26 struct crypto_cipher *child; 26 struct crypto_cipher *child;
27 void (*xor)(u8 *dst, const u8 *src, unsigned int bs);
28}; 27};
29 28
30static int crypto_pcbc_setkey(struct crypto_tfm *parent, const u8 *key, 29static int crypto_pcbc_setkey(struct crypto_tfm *parent, const u8 *key,
@@ -45,9 +44,7 @@ static int crypto_pcbc_setkey(struct crypto_tfm *parent, const u8 *key,
45 44
46static int crypto_pcbc_encrypt_segment(struct blkcipher_desc *desc, 45static int crypto_pcbc_encrypt_segment(struct blkcipher_desc *desc,
47 struct blkcipher_walk *walk, 46 struct blkcipher_walk *walk,
48 struct crypto_cipher *tfm, 47 struct crypto_cipher *tfm)
49 void (*xor)(u8 *, const u8 *,
50 unsigned int))
51{ 48{
52 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = 49 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
53 crypto_cipher_alg(tfm)->cia_encrypt; 50 crypto_cipher_alg(tfm)->cia_encrypt;
@@ -58,10 +55,10 @@ static int crypto_pcbc_encrypt_segment(struct blkcipher_desc *desc,
58 u8 *iv = walk->iv; 55 u8 *iv = walk->iv;
59 56
60 do { 57 do {
61 xor(iv, src, bsize); 58 crypto_xor(iv, src, bsize);
62 fn(crypto_cipher_tfm(tfm), dst, iv); 59 fn(crypto_cipher_tfm(tfm), dst, iv);
63 memcpy(iv, dst, bsize); 60 memcpy(iv, dst, bsize);
64 xor(iv, src, bsize); 61 crypto_xor(iv, src, bsize);
65 62
66 src += bsize; 63 src += bsize;
67 dst += bsize; 64 dst += bsize;
@@ -72,9 +69,7 @@ static int crypto_pcbc_encrypt_segment(struct blkcipher_desc *desc,
72 69
73static int crypto_pcbc_encrypt_inplace(struct blkcipher_desc *desc, 70static int crypto_pcbc_encrypt_inplace(struct blkcipher_desc *desc,
74 struct blkcipher_walk *walk, 71 struct blkcipher_walk *walk,
75 struct crypto_cipher *tfm, 72 struct crypto_cipher *tfm)
76 void (*xor)(u8 *, const u8 *,
77 unsigned int))
78{ 73{
79 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = 74 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
80 crypto_cipher_alg(tfm)->cia_encrypt; 75 crypto_cipher_alg(tfm)->cia_encrypt;
@@ -86,10 +81,10 @@ static int crypto_pcbc_encrypt_inplace(struct blkcipher_desc *desc,
86 81
87 do { 82 do {
88 memcpy(tmpbuf, src, bsize); 83 memcpy(tmpbuf, src, bsize);
89 xor(iv, tmpbuf, bsize); 84 crypto_xor(iv, src, bsize);
90 fn(crypto_cipher_tfm(tfm), src, iv); 85 fn(crypto_cipher_tfm(tfm), src, iv);
91 memcpy(iv, src, bsize); 86 memcpy(iv, tmpbuf, bsize);
92 xor(iv, tmpbuf, bsize); 87 crypto_xor(iv, src, bsize);
93 88
94 src += bsize; 89 src += bsize;
95 } while ((nbytes -= bsize) >= bsize); 90 } while ((nbytes -= bsize) >= bsize);
@@ -107,7 +102,6 @@ static int crypto_pcbc_encrypt(struct blkcipher_desc *desc,
107 struct crypto_blkcipher *tfm = desc->tfm; 102 struct crypto_blkcipher *tfm = desc->tfm;
108 struct crypto_pcbc_ctx *ctx = crypto_blkcipher_ctx(tfm); 103 struct crypto_pcbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
109 struct crypto_cipher *child = ctx->child; 104 struct crypto_cipher *child = ctx->child;
110 void (*xor)(u8 *, const u8 *, unsigned int bs) = ctx->xor;
111 int err; 105 int err;
112 106
113 blkcipher_walk_init(&walk, dst, src, nbytes); 107 blkcipher_walk_init(&walk, dst, src, nbytes);
@@ -115,11 +109,11 @@ static int crypto_pcbc_encrypt(struct blkcipher_desc *desc,
115 109
116 while ((nbytes = walk.nbytes)) { 110 while ((nbytes = walk.nbytes)) {
117 if (walk.src.virt.addr == walk.dst.virt.addr) 111 if (walk.src.virt.addr == walk.dst.virt.addr)
118 nbytes = crypto_pcbc_encrypt_inplace(desc, &walk, child, 112 nbytes = crypto_pcbc_encrypt_inplace(desc, &walk,
119 xor); 113 child);
120 else 114 else
121 nbytes = crypto_pcbc_encrypt_segment(desc, &walk, child, 115 nbytes = crypto_pcbc_encrypt_segment(desc, &walk,
122 xor); 116 child);
123 err = blkcipher_walk_done(desc, &walk, nbytes); 117 err = blkcipher_walk_done(desc, &walk, nbytes);
124 } 118 }
125 119
@@ -128,9 +122,7 @@ static int crypto_pcbc_encrypt(struct blkcipher_desc *desc,
128 122
129static int crypto_pcbc_decrypt_segment(struct blkcipher_desc *desc, 123static int crypto_pcbc_decrypt_segment(struct blkcipher_desc *desc,
130 struct blkcipher_walk *walk, 124 struct blkcipher_walk *walk,
131 struct crypto_cipher *tfm, 125 struct crypto_cipher *tfm)
132 void (*xor)(u8 *, const u8 *,
133 unsigned int))
134{ 126{
135 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = 127 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
136 crypto_cipher_alg(tfm)->cia_decrypt; 128 crypto_cipher_alg(tfm)->cia_decrypt;
@@ -142,9 +134,9 @@ static int crypto_pcbc_decrypt_segment(struct blkcipher_desc *desc,
142 134
143 do { 135 do {
144 fn(crypto_cipher_tfm(tfm), dst, src); 136 fn(crypto_cipher_tfm(tfm), dst, src);
145 xor(dst, iv, bsize); 137 crypto_xor(dst, iv, bsize);
146 memcpy(iv, src, bsize); 138 memcpy(iv, src, bsize);
147 xor(iv, dst, bsize); 139 crypto_xor(iv, dst, bsize);
148 140
149 src += bsize; 141 src += bsize;
150 dst += bsize; 142 dst += bsize;
@@ -157,9 +149,7 @@ static int crypto_pcbc_decrypt_segment(struct blkcipher_desc *desc,
157 149
158static int crypto_pcbc_decrypt_inplace(struct blkcipher_desc *desc, 150static int crypto_pcbc_decrypt_inplace(struct blkcipher_desc *desc,
159 struct blkcipher_walk *walk, 151 struct blkcipher_walk *walk,
160 struct crypto_cipher *tfm, 152 struct crypto_cipher *tfm)
161 void (*xor)(u8 *, const u8 *,
162 unsigned int))
163{ 153{
164 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) = 154 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
165 crypto_cipher_alg(tfm)->cia_decrypt; 155 crypto_cipher_alg(tfm)->cia_decrypt;
@@ -172,9 +162,9 @@ static int crypto_pcbc_decrypt_inplace(struct blkcipher_desc *desc,
172 do { 162 do {
173 memcpy(tmpbuf, src, bsize); 163 memcpy(tmpbuf, src, bsize);
174 fn(crypto_cipher_tfm(tfm), src, src); 164 fn(crypto_cipher_tfm(tfm), src, src);
175 xor(src, iv, bsize); 165 crypto_xor(src, iv, bsize);
176 memcpy(iv, tmpbuf, bsize); 166 memcpy(iv, tmpbuf, bsize);
177 xor(iv, src, bsize); 167 crypto_xor(iv, src, bsize);
178 168
179 src += bsize; 169 src += bsize;
180 } while ((nbytes -= bsize) >= bsize); 170 } while ((nbytes -= bsize) >= bsize);
@@ -192,7 +182,6 @@ static int crypto_pcbc_decrypt(struct blkcipher_desc *desc,
192 struct crypto_blkcipher *tfm = desc->tfm; 182 struct crypto_blkcipher *tfm = desc->tfm;
193 struct crypto_pcbc_ctx *ctx = crypto_blkcipher_ctx(tfm); 183 struct crypto_pcbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
194 struct crypto_cipher *child = ctx->child; 184 struct crypto_cipher *child = ctx->child;
195 void (*xor)(u8 *, const u8 *, unsigned int bs) = ctx->xor;
196 int err; 185 int err;
197 186
198 blkcipher_walk_init(&walk, dst, src, nbytes); 187 blkcipher_walk_init(&walk, dst, src, nbytes);
@@ -200,48 +189,17 @@ static int crypto_pcbc_decrypt(struct blkcipher_desc *desc,
200 189
201 while ((nbytes = walk.nbytes)) { 190 while ((nbytes = walk.nbytes)) {
202 if (walk.src.virt.addr == walk.dst.virt.addr) 191 if (walk.src.virt.addr == walk.dst.virt.addr)
203 nbytes = crypto_pcbc_decrypt_inplace(desc, &walk, child, 192 nbytes = crypto_pcbc_decrypt_inplace(desc, &walk,
204 xor); 193 child);
205 else 194 else
206 nbytes = crypto_pcbc_decrypt_segment(desc, &walk, child, 195 nbytes = crypto_pcbc_decrypt_segment(desc, &walk,
207 xor); 196 child);
208 err = blkcipher_walk_done(desc, &walk, nbytes); 197 err = blkcipher_walk_done(desc, &walk, nbytes);
209 } 198 }
210 199
211 return err; 200 return err;
212} 201}
213 202
214static void xor_byte(u8 *a, const u8 *b, unsigned int bs)
215{
216 do {
217 *a++ ^= *b++;
218 } while (--bs);
219}
220
221static void xor_quad(u8 *dst, const u8 *src, unsigned int bs)
222{
223 u32 *a = (u32 *)dst;
224 u32 *b = (u32 *)src;
225
226 do {
227 *a++ ^= *b++;
228 } while ((bs -= 4));
229}
230
231static void xor_64(u8 *a, const u8 *b, unsigned int bs)
232{
233 ((u32 *)a)[0] ^= ((u32 *)b)[0];
234 ((u32 *)a)[1] ^= ((u32 *)b)[1];
235}
236
237static void xor_128(u8 *a, const u8 *b, unsigned int bs)
238{
239 ((u32 *)a)[0] ^= ((u32 *)b)[0];
240 ((u32 *)a)[1] ^= ((u32 *)b)[1];
241 ((u32 *)a)[2] ^= ((u32 *)b)[2];
242 ((u32 *)a)[3] ^= ((u32 *)b)[3];
243}
244
245static int crypto_pcbc_init_tfm(struct crypto_tfm *tfm) 203static int crypto_pcbc_init_tfm(struct crypto_tfm *tfm)
246{ 204{
247 struct crypto_instance *inst = (void *)tfm->__crt_alg; 205 struct crypto_instance *inst = (void *)tfm->__crt_alg;
@@ -249,22 +207,6 @@ static int crypto_pcbc_init_tfm(struct crypto_tfm *tfm)
249 struct crypto_pcbc_ctx *ctx = crypto_tfm_ctx(tfm); 207 struct crypto_pcbc_ctx *ctx = crypto_tfm_ctx(tfm);
250 struct crypto_cipher *cipher; 208 struct crypto_cipher *cipher;
251 209
252 switch (crypto_tfm_alg_blocksize(tfm)) {
253 case 8:
254 ctx->xor = xor_64;
255 break;
256
257 case 16:
258 ctx->xor = xor_128;
259 break;
260
261 default:
262 if (crypto_tfm_alg_blocksize(tfm) % 4)
263 ctx->xor = xor_byte;
264 else
265 ctx->xor = xor_quad;
266 }
267
268 cipher = crypto_spawn_cipher(spawn); 210 cipher = crypto_spawn_cipher(spawn);
269 if (IS_ERR(cipher)) 211 if (IS_ERR(cipher))
270 return PTR_ERR(cipher); 212 return PTR_ERR(cipher);
@@ -304,8 +246,9 @@ static struct crypto_instance *crypto_pcbc_alloc(struct rtattr **tb)
304 inst->alg.cra_alignmask = alg->cra_alignmask; 246 inst->alg.cra_alignmask = alg->cra_alignmask;
305 inst->alg.cra_type = &crypto_blkcipher_type; 247 inst->alg.cra_type = &crypto_blkcipher_type;
306 248
307 if (!(alg->cra_blocksize % 4)) 249 /* We access the data as u32s when xoring. */
308 inst->alg.cra_alignmask |= 3; 250 inst->alg.cra_alignmask |= __alignof__(u32) - 1;
251
309 inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize; 252 inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
310 inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize; 253 inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
311 inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize; 254 inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;
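
The xor_byte/xor_64/xor_128 helpers removed here are superseded by the generic crypto_xor(), which XORs word-at-a-time; that is why the template now unconditionally ORs __alignof__(u32) - 1 into cra_alignmask instead of doing so only when the block size is a multiple of four. Roughly, and only as a sketch rather than the exact library routine, crypto_xor behaves like this:

#include <linux/types.h>

static void crypto_xor_sketch(u8 *dst, const u8 *src, unsigned int size)
{
	u32 *a = (u32 *)dst;
	const u32 *b = (const u32 *)src;

	/* Bulk of the work as 32-bit words; this is what makes the
	 * u32 alignment guarantee necessary. */
	for (; size >= 4; size -= 4)
		*a++ ^= *b++;

	/* Tail bytes, if the length is not a multiple of four. */
	for (dst = (u8 *)a, src = (const u8 *)b; size; size--)
		*dst++ ^= *src++;
}
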
diff --git a/crypto/salsa20_generic.c b/crypto/salsa20_generic.c
new file mode 100644
index 000000000000..1fa4e4ddcab5
--- /dev/null
+++ b/crypto/salsa20_generic.c
@@ -0,0 +1,255 @@
1/*
2 * Salsa20: Salsa20 stream cipher algorithm
3 *
4 * Copyright (c) 2007 Tan Swee Heng <thesweeheng@gmail.com>
5 *
6 * Derived from:
7 * - salsa20.c: Public domain C code by Daniel J. Bernstein <djb@cr.yp.to>
8 *
9 * Salsa20 is a stream cipher candidate in eSTREAM, the ECRYPT Stream
10 * Cipher Project. It is designed by Daniel J. Bernstein <djb@cr.yp.to>.
11 * More information about eSTREAM and Salsa20 can be found here:
12 * http://www.ecrypt.eu.org/stream/
13 * http://cr.yp.to/snuffle.html
14 *
15 * This program is free software; you can redistribute it and/or modify it
16 * under the terms of the GNU General Public License as published by the Free
17 * Software Foundation; either version 2 of the License, or (at your option)
18 * any later version.
19 *
20 */
21
22#include <linux/init.h>
23#include <linux/module.h>
24#include <linux/errno.h>
25#include <linux/crypto.h>
26#include <linux/types.h>
27#include <crypto/algapi.h>
28#include <asm/byteorder.h>
29
30#define SALSA20_IV_SIZE 8U
31#define SALSA20_MIN_KEY_SIZE 16U
32#define SALSA20_MAX_KEY_SIZE 32U
33
34/*
35 * Start of code taken from D. J. Bernstein's reference implementation.
36 * With some modifications and optimizations made to suit our needs.
37 */
38
39/*
40salsa20-ref.c version 20051118
41D. J. Bernstein
42Public domain.
43*/
44
45#define ROTATE(v,n) (((v) << (n)) | ((v) >> (32 - (n))))
46#define XOR(v,w) ((v) ^ (w))
47#define PLUS(v,w) (((v) + (w)))
48#define PLUSONE(v) (PLUS((v),1))
49#define U32TO8_LITTLE(p, v) \
50 { (p)[0] = (v >> 0) & 0xff; (p)[1] = (v >> 8) & 0xff; \
51 (p)[2] = (v >> 16) & 0xff; (p)[3] = (v >> 24) & 0xff; }
52#define U8TO32_LITTLE(p) \
53 (((u32)((p)[0]) ) | ((u32)((p)[1]) << 8) | \
54 ((u32)((p)[2]) << 16) | ((u32)((p)[3]) << 24) )
55
56struct salsa20_ctx
57{
58 u32 input[16];
59};
60
61static void salsa20_wordtobyte(u8 output[64], const u32 input[16])
62{
63 u32 x[16];
64 int i;
65
66 memcpy(x, input, sizeof(x));
67 for (i = 20; i > 0; i -= 2) {
68 x[ 4] = XOR(x[ 4],ROTATE(PLUS(x[ 0],x[12]), 7));
69 x[ 8] = XOR(x[ 8],ROTATE(PLUS(x[ 4],x[ 0]), 9));
70 x[12] = XOR(x[12],ROTATE(PLUS(x[ 8],x[ 4]),13));
71 x[ 0] = XOR(x[ 0],ROTATE(PLUS(x[12],x[ 8]),18));
72 x[ 9] = XOR(x[ 9],ROTATE(PLUS(x[ 5],x[ 1]), 7));
73 x[13] = XOR(x[13],ROTATE(PLUS(x[ 9],x[ 5]), 9));
74 x[ 1] = XOR(x[ 1],ROTATE(PLUS(x[13],x[ 9]),13));
75 x[ 5] = XOR(x[ 5],ROTATE(PLUS(x[ 1],x[13]),18));
76 x[14] = XOR(x[14],ROTATE(PLUS(x[10],x[ 6]), 7));
77 x[ 2] = XOR(x[ 2],ROTATE(PLUS(x[14],x[10]), 9));
78 x[ 6] = XOR(x[ 6],ROTATE(PLUS(x[ 2],x[14]),13));
79 x[10] = XOR(x[10],ROTATE(PLUS(x[ 6],x[ 2]),18));
80 x[ 3] = XOR(x[ 3],ROTATE(PLUS(x[15],x[11]), 7));
81 x[ 7] = XOR(x[ 7],ROTATE(PLUS(x[ 3],x[15]), 9));
82 x[11] = XOR(x[11],ROTATE(PLUS(x[ 7],x[ 3]),13));
83 x[15] = XOR(x[15],ROTATE(PLUS(x[11],x[ 7]),18));
84 x[ 1] = XOR(x[ 1],ROTATE(PLUS(x[ 0],x[ 3]), 7));
85 x[ 2] = XOR(x[ 2],ROTATE(PLUS(x[ 1],x[ 0]), 9));
86 x[ 3] = XOR(x[ 3],ROTATE(PLUS(x[ 2],x[ 1]),13));
87 x[ 0] = XOR(x[ 0],ROTATE(PLUS(x[ 3],x[ 2]),18));
88 x[ 6] = XOR(x[ 6],ROTATE(PLUS(x[ 5],x[ 4]), 7));
89 x[ 7] = XOR(x[ 7],ROTATE(PLUS(x[ 6],x[ 5]), 9));
90 x[ 4] = XOR(x[ 4],ROTATE(PLUS(x[ 7],x[ 6]),13));
91 x[ 5] = XOR(x[ 5],ROTATE(PLUS(x[ 4],x[ 7]),18));
92 x[11] = XOR(x[11],ROTATE(PLUS(x[10],x[ 9]), 7));
93 x[ 8] = XOR(x[ 8],ROTATE(PLUS(x[11],x[10]), 9));
94 x[ 9] = XOR(x[ 9],ROTATE(PLUS(x[ 8],x[11]),13));
95 x[10] = XOR(x[10],ROTATE(PLUS(x[ 9],x[ 8]),18));
96 x[12] = XOR(x[12],ROTATE(PLUS(x[15],x[14]), 7));
97 x[13] = XOR(x[13],ROTATE(PLUS(x[12],x[15]), 9));
98 x[14] = XOR(x[14],ROTATE(PLUS(x[13],x[12]),13));
99 x[15] = XOR(x[15],ROTATE(PLUS(x[14],x[13]),18));
100 }
101 for (i = 0; i < 16; ++i)
102 x[i] = PLUS(x[i],input[i]);
103 for (i = 0; i < 16; ++i)
104 U32TO8_LITTLE(output + 4 * i,x[i]);
105}
106
107static const char sigma[16] = "expand 32-byte k";
108static const char tau[16] = "expand 16-byte k";
109
110static void salsa20_keysetup(struct salsa20_ctx *ctx, const u8 *k, u32 kbytes)
111{
112 const char *constants;
113
114 ctx->input[1] = U8TO32_LITTLE(k + 0);
115 ctx->input[2] = U8TO32_LITTLE(k + 4);
116 ctx->input[3] = U8TO32_LITTLE(k + 8);
117 ctx->input[4] = U8TO32_LITTLE(k + 12);
118 if (kbytes == 32) { /* recommended */
119 k += 16;
120 constants = sigma;
121 } else { /* kbytes == 16 */
122 constants = tau;
123 }
124 ctx->input[11] = U8TO32_LITTLE(k + 0);
125 ctx->input[12] = U8TO32_LITTLE(k + 4);
126 ctx->input[13] = U8TO32_LITTLE(k + 8);
127 ctx->input[14] = U8TO32_LITTLE(k + 12);
128 ctx->input[0] = U8TO32_LITTLE(constants + 0);
129 ctx->input[5] = U8TO32_LITTLE(constants + 4);
130 ctx->input[10] = U8TO32_LITTLE(constants + 8);
131 ctx->input[15] = U8TO32_LITTLE(constants + 12);
132}
133
134static void salsa20_ivsetup(struct salsa20_ctx *ctx, const u8 *iv)
135{
136 ctx->input[6] = U8TO32_LITTLE(iv + 0);
137 ctx->input[7] = U8TO32_LITTLE(iv + 4);
138 ctx->input[8] = 0;
139 ctx->input[9] = 0;
140}
141
142static void salsa20_encrypt_bytes(struct salsa20_ctx *ctx, u8 *dst,
143 const u8 *src, unsigned int bytes)
144{
145 u8 buf[64];
146
147 if (dst != src)
148 memcpy(dst, src, bytes);
149
150 while (bytes) {
151 salsa20_wordtobyte(buf, ctx->input);
152
153 ctx->input[8] = PLUSONE(ctx->input[8]);
154 if (!ctx->input[8])
155 ctx->input[9] = PLUSONE(ctx->input[9]);
156
157 if (bytes <= 64) {
158 crypto_xor(dst, buf, bytes);
159 return;
160 }
161
162 crypto_xor(dst, buf, 64);
163 bytes -= 64;
164 dst += 64;
165 }
166}
167
168/*
169 * End of code taken from D. J. Bernstein's reference implementation.
170 */
171
172static int setkey(struct crypto_tfm *tfm, const u8 *key,
173 unsigned int keysize)
174{
175 struct salsa20_ctx *ctx = crypto_tfm_ctx(tfm);
176 salsa20_keysetup(ctx, key, keysize);
177 return 0;
178}
179
180static int encrypt(struct blkcipher_desc *desc,
181 struct scatterlist *dst, struct scatterlist *src,
182 unsigned int nbytes)
183{
184 struct blkcipher_walk walk;
185 struct crypto_blkcipher *tfm = desc->tfm;
186 struct salsa20_ctx *ctx = crypto_blkcipher_ctx(tfm);
187 int err;
188
189 blkcipher_walk_init(&walk, dst, src, nbytes);
190 err = blkcipher_walk_virt_block(desc, &walk, 64);
191
192 salsa20_ivsetup(ctx, walk.iv);
193
194 if (likely(walk.nbytes == nbytes))
195 {
196 salsa20_encrypt_bytes(ctx, walk.dst.virt.addr,
197 walk.src.virt.addr, nbytes);
198 return blkcipher_walk_done(desc, &walk, 0);
199 }
200
201 while (walk.nbytes >= 64) {
202 salsa20_encrypt_bytes(ctx, walk.dst.virt.addr,
203 walk.src.virt.addr,
204 walk.nbytes - (walk.nbytes % 64));
205 err = blkcipher_walk_done(desc, &walk, walk.nbytes % 64);
206 }
207
208 if (walk.nbytes) {
209 salsa20_encrypt_bytes(ctx, walk.dst.virt.addr,
210 walk.src.virt.addr, walk.nbytes);
211 err = blkcipher_walk_done(desc, &walk, 0);
212 }
213
214 return err;
215}
216
217static struct crypto_alg alg = {
218 .cra_name = "salsa20",
219 .cra_driver_name = "salsa20-generic",
220 .cra_priority = 100,
221 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
222 .cra_type = &crypto_blkcipher_type,
223 .cra_blocksize = 1,
224 .cra_ctxsize = sizeof(struct salsa20_ctx),
225 .cra_alignmask = 3,
226 .cra_module = THIS_MODULE,
227 .cra_list = LIST_HEAD_INIT(alg.cra_list),
228 .cra_u = {
229 .blkcipher = {
230 .setkey = setkey,
231 .encrypt = encrypt,
232 .decrypt = encrypt,
233 .min_keysize = SALSA20_MIN_KEY_SIZE,
234 .max_keysize = SALSA20_MAX_KEY_SIZE,
235 .ivsize = SALSA20_IV_SIZE,
236 }
237 }
238};
239
240static int __init init(void)
241{
242 return crypto_register_alg(&alg);
243}
244
245static void __exit fini(void)
246{
247 crypto_unregister_alg(&alg);
248}
249
250module_init(init);
251module_exit(fini);
252
253MODULE_LICENSE("GPL");
254MODULE_DESCRIPTION("Salsa20 stream cipher algorithm");
255MODULE_ALIAS("salsa20");
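
Because the algorithm registers as a blkcipher with a block size of 1, the 64-byte walk above lets callers pass arbitrary lengths; the 8-byte IV is the Salsa20 nonce, and salsa20_ivsetup() resets the 64-bit block counter to zero for every request. A hedged sketch of in-kernel use follows (the function name and parameters are illustrative, and synchronous operation is assumed):

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

static int salsa20_crypt_example(u8 *buf, unsigned int len,
				 const u8 *key, unsigned int keylen,
				 u8 *nonce /* 8 bytes */)
{
	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	struct scatterlist sg[1];
	int err;

	tfm = crypto_alloc_blkcipher("salsa20", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_blkcipher_setkey(tfm, key, keylen);	/* 16 or 32 bytes */
	if (err)
		goto out;

	desc.tfm = tfm;
	desc.flags = 0;
	desc.info = nonce;	/* used as the IV by encrypt_iv below */

	sg_init_one(sg, buf, len);
	/* Encryption and decryption are the same keystream XOR. */
	err = crypto_blkcipher_encrypt_iv(&desc, sg, sg, len);

out:
	crypto_free_blkcipher(tfm);
	return err;
}
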
diff --git a/crypto/scatterwalk.c b/crypto/scatterwalk.c
index b9bbda0bb9f9..9aeeb52004a5 100644
--- a/crypto/scatterwalk.c
+++ b/crypto/scatterwalk.c
@@ -13,6 +13,8 @@
13 * any later version. 13 * any later version.
14 * 14 *
15 */ 15 */
16
17#include <crypto/scatterwalk.h>
16#include <linux/kernel.h> 18#include <linux/kernel.h>
17#include <linux/mm.h> 19#include <linux/mm.h>
18#include <linux/module.h> 20#include <linux/module.h>
@@ -20,9 +22,6 @@
20#include <linux/highmem.h> 22#include <linux/highmem.h>
21#include <linux/scatterlist.h> 23#include <linux/scatterlist.h>
22 24
23#include "internal.h"
24#include "scatterwalk.h"
25
26static inline void memcpy_dir(void *buf, void *sgdata, size_t nbytes, int out) 25static inline void memcpy_dir(void *buf, void *sgdata, size_t nbytes, int out)
27{ 26{
28 void *src = out ? buf : sgdata; 27 void *src = out ? buf : sgdata;
@@ -106,6 +105,9 @@ void scatterwalk_map_and_copy(void *buf, struct scatterlist *sg,
106 struct scatter_walk walk; 105 struct scatter_walk walk;
107 unsigned int offset = 0; 106 unsigned int offset = 0;
108 107
108 if (!nbytes)
109 return;
110
109 for (;;) { 111 for (;;) {
110 scatterwalk_start(&walk, sg); 112 scatterwalk_start(&walk, sg);
111 113
@@ -113,7 +115,7 @@ void scatterwalk_map_and_copy(void *buf, struct scatterlist *sg,
113 break; 115 break;
114 116
115 offset += sg->length; 117 offset += sg->length;
116 sg = sg_next(sg); 118 sg = scatterwalk_sg_next(sg);
117 } 119 }
118 120
119 scatterwalk_advance(&walk, start - offset); 121 scatterwalk_advance(&walk, start - offset);
diff --git a/crypto/seqiv.c b/crypto/seqiv.c
new file mode 100644
index 000000000000..b903aab31577
--- /dev/null
+++ b/crypto/seqiv.c
@@ -0,0 +1,345 @@
1/*
2 * seqiv: Sequence Number IV Generator
3 *
4 * This generator generates an IV based on a sequence number by xoring it
5 * with a salt. This algorithm is mainly useful for CTR and similar modes.
6 *
7 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
8 *
9 * This program is free software; you can redistribute it and/or modify it
10 * under the terms of the GNU General Public License as published by the Free
11 * Software Foundation; either version 2 of the License, or (at your option)
12 * any later version.
13 *
14 */
15
16#include <crypto/internal/aead.h>
17#include <crypto/internal/skcipher.h>
18#include <linux/err.h>
19#include <linux/init.h>
20#include <linux/kernel.h>
21#include <linux/module.h>
22#include <linux/random.h>
23#include <linux/spinlock.h>
24#include <linux/string.h>
25
26struct seqiv_ctx {
27 spinlock_t lock;
28 u8 salt[] __attribute__ ((aligned(__alignof__(u32))));
29};
30
31static void seqiv_complete2(struct skcipher_givcrypt_request *req, int err)
32{
33 struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
34 struct crypto_ablkcipher *geniv;
35
36 if (err == -EINPROGRESS)
37 return;
38
39 if (err)
40 goto out;
41
42 geniv = skcipher_givcrypt_reqtfm(req);
43 memcpy(req->creq.info, subreq->info, crypto_ablkcipher_ivsize(geniv));
44
45out:
46 kfree(subreq->info);
47}
48
49static void seqiv_complete(struct crypto_async_request *base, int err)
50{
51 struct skcipher_givcrypt_request *req = base->data;
52
53 seqiv_complete2(req, err);
54 skcipher_givcrypt_complete(req, err);
55}
56
57static void seqiv_aead_complete2(struct aead_givcrypt_request *req, int err)
58{
59 struct aead_request *subreq = aead_givcrypt_reqctx(req);
60 struct crypto_aead *geniv;
61
62 if (err == -EINPROGRESS)
63 return;
64
65 if (err)
66 goto out;
67
68 geniv = aead_givcrypt_reqtfm(req);
69 memcpy(req->areq.iv, subreq->iv, crypto_aead_ivsize(geniv));
70
71out:
72 kfree(subreq->iv);
73}
74
75static void seqiv_aead_complete(struct crypto_async_request *base, int err)
76{
77 struct aead_givcrypt_request *req = base->data;
78
79 seqiv_aead_complete2(req, err);
80 aead_givcrypt_complete(req, err);
81}
82
83static void seqiv_geniv(struct seqiv_ctx *ctx, u8 *info, u64 seq,
84 unsigned int ivsize)
85{
86 unsigned int len = ivsize;
87
88 if (ivsize > sizeof(u64)) {
89 memset(info, 0, ivsize - sizeof(u64));
90 len = sizeof(u64);
91 }
92 seq = cpu_to_be64(seq);
93 memcpy(info + ivsize - len, &seq, len);
94 crypto_xor(info, ctx->salt, ivsize);
95}
96
97static int seqiv_givencrypt(struct skcipher_givcrypt_request *req)
98{
99 struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
100 struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
101 struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
102 crypto_completion_t complete;
103 void *data;
104 u8 *info;
105 unsigned int ivsize;
106 int err;
107
108 ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv));
109
110 complete = req->creq.base.complete;
111 data = req->creq.base.data;
112 info = req->creq.info;
113
114 ivsize = crypto_ablkcipher_ivsize(geniv);
115
116 if (unlikely(!IS_ALIGNED((unsigned long)info,
117 crypto_ablkcipher_alignmask(geniv) + 1))) {
118 info = kmalloc(ivsize, req->creq.base.flags &
119 CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
120 GFP_ATOMIC);
121 if (!info)
122 return -ENOMEM;
123
124 complete = seqiv_complete;
125 data = req;
126 }
127
128 ablkcipher_request_set_callback(subreq, req->creq.base.flags, complete,
129 data);
130 ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst,
131 req->creq.nbytes, info);
132
133 seqiv_geniv(ctx, info, req->seq, ivsize);
134 memcpy(req->giv, info, ivsize);
135
136 err = crypto_ablkcipher_encrypt(subreq);
137 if (unlikely(info != req->creq.info))
138 seqiv_complete2(req, err);
139 return err;
140}
141
142static int seqiv_aead_givencrypt(struct aead_givcrypt_request *req)
143{
144 struct crypto_aead *geniv = aead_givcrypt_reqtfm(req);
145 struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
146 struct aead_request *areq = &req->areq;
147 struct aead_request *subreq = aead_givcrypt_reqctx(req);
148 crypto_completion_t complete;
149 void *data;
150 u8 *info;
151 unsigned int ivsize;
152 int err;
153
154 aead_request_set_tfm(subreq, aead_geniv_base(geniv));
155
156 complete = areq->base.complete;
157 data = areq->base.data;
158 info = areq->iv;
159
160 ivsize = crypto_aead_ivsize(geniv);
161
162 if (unlikely(!IS_ALIGNED((unsigned long)info,
163 crypto_aead_alignmask(geniv) + 1))) {
164 info = kmalloc(ivsize, areq->base.flags &
165 CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
166 GFP_ATOMIC);
167 if (!info)
168 return -ENOMEM;
169
170 complete = seqiv_aead_complete;
171 data = req;
172 }
173
174 aead_request_set_callback(subreq, areq->base.flags, complete, data);
175 aead_request_set_crypt(subreq, areq->src, areq->dst, areq->cryptlen,
176 info);
177 aead_request_set_assoc(subreq, areq->assoc, areq->assoclen);
178
179 seqiv_geniv(ctx, info, req->seq, ivsize);
180 memcpy(req->giv, info, ivsize);
181
182 err = crypto_aead_encrypt(subreq);
183 if (unlikely(info != areq->iv))
184 seqiv_aead_complete2(req, err);
185 return err;
186}
187
188static int seqiv_givencrypt_first(struct skcipher_givcrypt_request *req)
189{
190 struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
191 struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
192
193 spin_lock_bh(&ctx->lock);
194 if (crypto_ablkcipher_crt(geniv)->givencrypt != seqiv_givencrypt_first)
195 goto unlock;
196
197 crypto_ablkcipher_crt(geniv)->givencrypt = seqiv_givencrypt;
198 get_random_bytes(ctx->salt, crypto_ablkcipher_ivsize(geniv));
199
200unlock:
201 spin_unlock_bh(&ctx->lock);
202
203 return seqiv_givencrypt(req);
204}
205
206static int seqiv_aead_givencrypt_first(struct aead_givcrypt_request *req)
207{
208 struct crypto_aead *geniv = aead_givcrypt_reqtfm(req);
209 struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
210
211 spin_lock_bh(&ctx->lock);
212 if (crypto_aead_crt(geniv)->givencrypt != seqiv_aead_givencrypt_first)
213 goto unlock;
214
215 crypto_aead_crt(geniv)->givencrypt = seqiv_aead_givencrypt;
216 get_random_bytes(ctx->salt, crypto_aead_ivsize(geniv));
217
218unlock:
219 spin_unlock_bh(&ctx->lock);
220
221 return seqiv_aead_givencrypt(req);
222}
223
224static int seqiv_init(struct crypto_tfm *tfm)
225{
226 struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm);
227 struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
228
229 spin_lock_init(&ctx->lock);
230
231 tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request);
232
233 return skcipher_geniv_init(tfm);
234}
235
236static int seqiv_aead_init(struct crypto_tfm *tfm)
237{
238 struct crypto_aead *geniv = __crypto_aead_cast(tfm);
239 struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
240
241 spin_lock_init(&ctx->lock);
242
243 tfm->crt_aead.reqsize = sizeof(struct aead_request);
244
245 return aead_geniv_init(tfm);
246}
247
248static struct crypto_template seqiv_tmpl;
249
250static struct crypto_instance *seqiv_ablkcipher_alloc(struct rtattr **tb)
251{
252 struct crypto_instance *inst;
253
254 inst = skcipher_geniv_alloc(&seqiv_tmpl, tb, 0, 0);
255
256 if (IS_ERR(inst))
257 goto out;
258
259 inst->alg.cra_ablkcipher.givencrypt = seqiv_givencrypt_first;
260
261 inst->alg.cra_init = seqiv_init;
262 inst->alg.cra_exit = skcipher_geniv_exit;
263
264 inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize;
265
266out:
267 return inst;
268}
269
270static struct crypto_instance *seqiv_aead_alloc(struct rtattr **tb)
271{
272 struct crypto_instance *inst;
273
274 inst = aead_geniv_alloc(&seqiv_tmpl, tb, 0, 0);
275
276 if (IS_ERR(inst))
277 goto out;
278
279 inst->alg.cra_aead.givencrypt = seqiv_aead_givencrypt_first;
280
281 inst->alg.cra_init = seqiv_aead_init;
282 inst->alg.cra_exit = aead_geniv_exit;
283
284 inst->alg.cra_ctxsize = inst->alg.cra_aead.ivsize;
285
286out:
287 return inst;
288}
289
290static struct crypto_instance *seqiv_alloc(struct rtattr **tb)
291{
292 struct crypto_attr_type *algt;
293 struct crypto_instance *inst;
294 int err;
295
296 algt = crypto_get_attr_type(tb);
297 err = PTR_ERR(algt);
298 if (IS_ERR(algt))
299 return ERR_PTR(err);
300
301 if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
302 inst = seqiv_ablkcipher_alloc(tb);
303 else
304 inst = seqiv_aead_alloc(tb);
305
306 if (IS_ERR(inst))
307 goto out;
308
309 inst->alg.cra_alignmask |= __alignof__(u32) - 1;
310 inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx);
311
312out:
313 return inst;
314}
315
316static void seqiv_free(struct crypto_instance *inst)
317{
318 if ((inst->alg.cra_flags ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
319 skcipher_geniv_free(inst);
320 else
321 aead_geniv_free(inst);
322}
323
324static struct crypto_template seqiv_tmpl = {
325 .name = "seqiv",
326 .alloc = seqiv_alloc,
327 .free = seqiv_free,
328 .module = THIS_MODULE,
329};
330
331static int __init seqiv_module_init(void)
332{
333 return crypto_register_template(&seqiv_tmpl);
334}
335
336static void __exit seqiv_module_exit(void)
337{
338 crypto_unregister_template(&seqiv_tmpl);
339}
340
341module_init(seqiv_module_init);
342module_exit(seqiv_module_exit);
343
344MODULE_LICENSE("GPL");
345MODULE_DESCRIPTION("Sequence Number IV Generator");
diff --git a/crypto/sha256_generic.c b/crypto/sha256_generic.c
index fd3918be58b5..3cc93fd61043 100644
--- a/crypto/sha256_generic.c
+++ b/crypto/sha256_generic.c
@@ -9,6 +9,7 @@
9 * Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com> 9 * Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com>
10 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk> 10 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
11 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> 11 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
12 * SHA224 Support Copyright 2007 Intel Corporation <jonathan.lynch@intel.com>
12 * 13 *
13 * This program is free software; you can redistribute it and/or modify it 14 * This program is free software; you can redistribute it and/or modify it
14 * under the terms of the GNU General Public License as published by the Free 15 * under the terms of the GNU General Public License as published by the Free
@@ -218,6 +219,22 @@ static void sha256_transform(u32 *state, const u8 *input)
218 memset(W, 0, 64 * sizeof(u32)); 219 memset(W, 0, 64 * sizeof(u32));
219} 220}
220 221
222
223static void sha224_init(struct crypto_tfm *tfm)
224{
225 struct sha256_ctx *sctx = crypto_tfm_ctx(tfm);
226 sctx->state[0] = SHA224_H0;
227 sctx->state[1] = SHA224_H1;
228 sctx->state[2] = SHA224_H2;
229 sctx->state[3] = SHA224_H3;
230 sctx->state[4] = SHA224_H4;
231 sctx->state[5] = SHA224_H5;
232 sctx->state[6] = SHA224_H6;
233 sctx->state[7] = SHA224_H7;
234 sctx->count[0] = 0;
235 sctx->count[1] = 0;
236}
237
221static void sha256_init(struct crypto_tfm *tfm) 238static void sha256_init(struct crypto_tfm *tfm)
222{ 239{
223 struct sha256_ctx *sctx = crypto_tfm_ctx(tfm); 240 struct sha256_ctx *sctx = crypto_tfm_ctx(tfm);
@@ -294,8 +311,17 @@ static void sha256_final(struct crypto_tfm *tfm, u8 *out)
294 memset(sctx, 0, sizeof(*sctx)); 311 memset(sctx, 0, sizeof(*sctx));
295} 312}
296 313
314static void sha224_final(struct crypto_tfm *tfm, u8 *hash)
315{
316 u8 D[SHA256_DIGEST_SIZE];
317
318 sha256_final(tfm, D);
319
320 memcpy(hash, D, SHA224_DIGEST_SIZE);
321 memset(D, 0, SHA256_DIGEST_SIZE);
322}
297 323
298static struct crypto_alg alg = { 324static struct crypto_alg sha256 = {
299 .cra_name = "sha256", 325 .cra_name = "sha256",
300 .cra_driver_name= "sha256-generic", 326 .cra_driver_name= "sha256-generic",
301 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 327 .cra_flags = CRYPTO_ALG_TYPE_DIGEST,
@@ -303,28 +329,58 @@ static struct crypto_alg alg = {
303 .cra_ctxsize = sizeof(struct sha256_ctx), 329 .cra_ctxsize = sizeof(struct sha256_ctx),
304 .cra_module = THIS_MODULE, 330 .cra_module = THIS_MODULE,
305 .cra_alignmask = 3, 331 .cra_alignmask = 3,
306 .cra_list = LIST_HEAD_INIT(alg.cra_list), 332 .cra_list = LIST_HEAD_INIT(sha256.cra_list),
307 .cra_u = { .digest = { 333 .cra_u = { .digest = {
308 .dia_digestsize = SHA256_DIGEST_SIZE, 334 .dia_digestsize = SHA256_DIGEST_SIZE,
309 .dia_init = sha256_init, 335 .dia_init = sha256_init,
310 .dia_update = sha256_update, 336 .dia_update = sha256_update,
311 .dia_final = sha256_final } } 337 .dia_final = sha256_final } }
338};
339
340static struct crypto_alg sha224 = {
341 .cra_name = "sha224",
342 .cra_driver_name = "sha224-generic",
343 .cra_flags = CRYPTO_ALG_TYPE_DIGEST,
344 .cra_blocksize = SHA224_BLOCK_SIZE,
345 .cra_ctxsize = sizeof(struct sha256_ctx),
346 .cra_module = THIS_MODULE,
347 .cra_alignmask = 3,
348 .cra_list = LIST_HEAD_INIT(sha224.cra_list),
349 .cra_u = { .digest = {
350 .dia_digestsize = SHA224_DIGEST_SIZE,
351 .dia_init = sha224_init,
352 .dia_update = sha256_update,
353 .dia_final = sha224_final } }
312}; 354};
313 355
314static int __init init(void) 356static int __init init(void)
315{ 357{
316 return crypto_register_alg(&alg); 358 int ret = 0;
359
360 ret = crypto_register_alg(&sha224);
361
362 if (ret < 0)
363 return ret;
364
365 ret = crypto_register_alg(&sha256);
366
367 if (ret < 0)
368 crypto_unregister_alg(&sha224);
369
370 return ret;
317} 371}
318 372
319static void __exit fini(void) 373static void __exit fini(void)
320{ 374{
321 crypto_unregister_alg(&alg); 375 crypto_unregister_alg(&sha224);
376 crypto_unregister_alg(&sha256);
322} 377}
323 378
324module_init(init); 379module_init(init);
325module_exit(fini); 380module_exit(fini);
326 381
327MODULE_LICENSE("GPL"); 382MODULE_LICENSE("GPL");
328MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm"); 383MODULE_DESCRIPTION("SHA-224 and SHA-256 Secure Hash Algorithm");
329 384
385MODULE_ALIAS("sha224");
330MODULE_ALIAS("sha256"); 386MODULE_ALIAS("sha256");
diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c
index 24141fb6f5cb..1ab8c017a011 100644
--- a/crypto/tcrypt.c
+++ b/crypto/tcrypt.c
@@ -6,12 +6,16 @@
6 * 6 *
7 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> 7 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
8 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org> 8 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
9 * Copyright (c) 2007 Nokia Siemens Networks
9 * 10 *
10 * This program is free software; you can redistribute it and/or modify it 11 * This program is free software; you can redistribute it and/or modify it
11 * under the terms of the GNU General Public License as published by the Free 12 * under the terms of the GNU General Public License as published by the Free
12 * Software Foundation; either version 2 of the License, or (at your option) 13 * Software Foundation; either version 2 of the License, or (at your option)
13 * any later version. 14 * any later version.
14 * 15 *
16 * 2007-11-13 Added GCM tests
17 * 2007-11-13 Added AEAD support
18 * 2007-11-06 Added SHA-224 and SHA-224-HMAC tests
15 * 2006-12-07 Added SHA384 HMAC and SHA512 HMAC tests 19 * 2006-12-07 Added SHA384 HMAC and SHA512 HMAC tests
16 * 2004-08-09 Added cipher speed tests (Reyk Floeter <reyk@vantronix.net>) 20 * 2004-08-09 Added cipher speed tests (Reyk Floeter <reyk@vantronix.net>)
17 * 2003-09-14 Rewritten by Kartikey Mahendra Bhatt 21 * 2003-09-14 Rewritten by Kartikey Mahendra Bhatt
@@ -71,22 +75,23 @@ static unsigned int sec;
71 75
72static int mode; 76static int mode;
73static char *xbuf; 77static char *xbuf;
78static char *axbuf;
74static char *tvmem; 79static char *tvmem;
75 80
76static char *check[] = { 81static char *check[] = {
77 "des", "md5", "des3_ede", "rot13", "sha1", "sha256", "blowfish", 82 "des", "md5", "des3_ede", "rot13", "sha1", "sha224", "sha256",
78 "twofish", "serpent", "sha384", "sha512", "md4", "aes", "cast6", 83 "blowfish", "twofish", "serpent", "sha384", "sha512", "md4", "aes",
84 "cast6", "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
79 "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea", 85 "arc4", "michael_mic", "deflate", "crc32c", "tea", "xtea",
80 "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt", 86 "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
81 "camellia", "seed", NULL 87 "camellia", "seed", "salsa20", "lzo", NULL
82}; 88};
83 89
84static void hexdump(unsigned char *buf, unsigned int len) 90static void hexdump(unsigned char *buf, unsigned int len)
85{ 91{
86 while (len--) 92 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
87 printk("%02x", *buf++); 93 16, 1,
88 94 buf, len, false);
89 printk("\n");
90} 95}
91 96
92static void tcrypt_complete(struct crypto_async_request *req, int err) 97static void tcrypt_complete(struct crypto_async_request *req, int err)
@@ -215,6 +220,238 @@ out:
215 crypto_free_hash(tfm); 220 crypto_free_hash(tfm);
216} 221}
217 222
223static void test_aead(char *algo, int enc, struct aead_testvec *template,
224 unsigned int tcount)
225{
226 unsigned int ret, i, j, k, temp;
227 unsigned int tsize;
228 char *q;
229 struct crypto_aead *tfm;
230 char *key;
231 struct aead_testvec *aead_tv;
232 struct aead_request *req;
233 struct scatterlist sg[8];
234 struct scatterlist asg[8];
235 const char *e;
236 struct tcrypt_result result;
237 unsigned int authsize;
238
239 if (enc == ENCRYPT)
240 e = "encryption";
241 else
242 e = "decryption";
243
244 printk(KERN_INFO "\ntesting %s %s\n", algo, e);
245
246 tsize = sizeof(struct aead_testvec);
247 tsize *= tcount;
248
249 if (tsize > TVMEMSIZE) {
250 printk(KERN_INFO "template (%u) too big for tvmem (%u)\n",
251 tsize, TVMEMSIZE);
252 return;
253 }
254
255 memcpy(tvmem, template, tsize);
256 aead_tv = (void *)tvmem;
257
258 init_completion(&result.completion);
259
260 tfm = crypto_alloc_aead(algo, 0, 0);
261
262 if (IS_ERR(tfm)) {
263 printk(KERN_INFO "failed to load transform for %s: %ld\n",
264 algo, PTR_ERR(tfm));
265 return;
266 }
267
268 req = aead_request_alloc(tfm, GFP_KERNEL);
269 if (!req) {
270 printk(KERN_INFO "failed to allocate request for %s\n", algo);
271 goto out;
272 }
273
274 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
275 tcrypt_complete, &result);
276
277 for (i = 0, j = 0; i < tcount; i++) {
278 if (!aead_tv[i].np) {
279 printk(KERN_INFO "test %u (%d bit key):\n",
280 ++j, aead_tv[i].klen * 8);
281
282 crypto_aead_clear_flags(tfm, ~0);
283 if (aead_tv[i].wk)
284 crypto_aead_set_flags(
285 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
286 key = aead_tv[i].key;
287
288 ret = crypto_aead_setkey(tfm, key,
289 aead_tv[i].klen);
290 if (ret) {
291 printk(KERN_INFO "setkey() failed flags=%x\n",
292 crypto_aead_get_flags(tfm));
293
294 if (!aead_tv[i].fail)
295 goto out;
296 }
297
298 authsize = abs(aead_tv[i].rlen - aead_tv[i].ilen);
299 ret = crypto_aead_setauthsize(tfm, authsize);
300 if (ret) {
301 printk(KERN_INFO
302 "failed to set authsize = %u\n",
303 authsize);
304 goto out;
305 }
306
307 sg_init_one(&sg[0], aead_tv[i].input,
308 aead_tv[i].ilen + (enc ? authsize : 0));
309
310 sg_init_one(&asg[0], aead_tv[i].assoc,
311 aead_tv[i].alen);
312
313 aead_request_set_crypt(req, sg, sg,
314 aead_tv[i].ilen,
315 aead_tv[i].iv);
316
317 aead_request_set_assoc(req, asg, aead_tv[i].alen);
318
319 ret = enc ?
320 crypto_aead_encrypt(req) :
321 crypto_aead_decrypt(req);
322
323 switch (ret) {
324 case 0:
325 break;
326 case -EINPROGRESS:
327 case -EBUSY:
328 ret = wait_for_completion_interruptible(
329 &result.completion);
330 if (!ret && !(ret = result.err)) {
331 INIT_COMPLETION(result.completion);
332 break;
333 }
334 /* fall through */
335 default:
336 printk(KERN_INFO "%s () failed err=%d\n",
337 e, -ret);
338 goto out;
339 }
340
341 q = kmap(sg_page(&sg[0])) + sg[0].offset;
342 hexdump(q, aead_tv[i].rlen);
343
344 printk(KERN_INFO "enc/dec: %s\n",
345 memcmp(q, aead_tv[i].result,
346 aead_tv[i].rlen) ? "fail" : "pass");
347 }
348 }
349
350 printk(KERN_INFO "\ntesting %s %s across pages (chunking)\n", algo, e);
351 memset(xbuf, 0, XBUFSIZE);
352 memset(axbuf, 0, XBUFSIZE);
353
354 for (i = 0, j = 0; i < tcount; i++) {
355 if (aead_tv[i].np) {
356 printk(KERN_INFO "test %u (%d bit key):\n",
357 ++j, aead_tv[i].klen * 8);
358
359 crypto_aead_clear_flags(tfm, ~0);
360 if (aead_tv[i].wk)
361 crypto_aead_set_flags(
362 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
363 key = aead_tv[i].key;
364
365 ret = crypto_aead_setkey(tfm, key, aead_tv[i].klen);
366 if (ret) {
367 printk(KERN_INFO "setkey() failed flags=%x\n",
368 crypto_aead_get_flags(tfm));
369
370 if (!aead_tv[i].fail)
371 goto out;
372 }
373
374 sg_init_table(sg, aead_tv[i].np);
375 for (k = 0, temp = 0; k < aead_tv[i].np; k++) {
376 memcpy(&xbuf[IDX[k]],
377 aead_tv[i].input + temp,
378 aead_tv[i].tap[k]);
379 temp += aead_tv[i].tap[k];
380 sg_set_buf(&sg[k], &xbuf[IDX[k]],
381 aead_tv[i].tap[k]);
382 }
383
384 authsize = abs(aead_tv[i].rlen - aead_tv[i].ilen);
385 ret = crypto_aead_setauthsize(tfm, authsize);
386 if (ret) {
387 printk(KERN_INFO
388 "failed to set authsize = %u\n",
389 authsize);
390 goto out;
391 }
392
393 if (enc)
394 sg[k - 1].length += authsize;
395
396 sg_init_table(asg, aead_tv[i].anp);
397 for (k = 0, temp = 0; k < aead_tv[i].anp; k++) {
398 memcpy(&axbuf[IDX[k]],
399 aead_tv[i].assoc + temp,
400 aead_tv[i].atap[k]);
401 temp += aead_tv[i].atap[k];
402 sg_set_buf(&asg[k], &axbuf[IDX[k]],
403 aead_tv[i].atap[k]);
404 }
405
406 aead_request_set_crypt(req, sg, sg,
407 aead_tv[i].ilen,
408 aead_tv[i].iv);
409
410 aead_request_set_assoc(req, asg, aead_tv[i].alen);
411
412 ret = enc ?
413 crypto_aead_encrypt(req) :
414 crypto_aead_decrypt(req);
415
416 switch (ret) {
417 case 0:
418 break;
419 case -EINPROGRESS:
420 case -EBUSY:
421 ret = wait_for_completion_interruptible(
422 &result.completion);
423 if (!ret && !(ret = result.err)) {
424 INIT_COMPLETION(result.completion);
425 break;
426 }
427 /* fall through */
428 default:
429 printk(KERN_INFO "%s () failed err=%d\n",
430 e, -ret);
431 goto out;
432 }
433
434 for (k = 0, temp = 0; k < aead_tv[i].np; k++) {
435 printk(KERN_INFO "page %u\n", k);
436 q = kmap(sg_page(&sg[k])) + sg[k].offset;
437 hexdump(q, aead_tv[i].tap[k]);
438 printk(KERN_INFO "%s\n",
439 memcmp(q, aead_tv[i].result + temp,
440 aead_tv[i].tap[k] -
441 (k < aead_tv[i].np - 1 || enc ?
442 0 : authsize)) ?
443 "fail" : "pass");
444
445 temp += aead_tv[i].tap[k];
446 }
447 }
448 }
449
450out:
451 crypto_free_aead(tfm);
452 aead_request_free(req);
453}
454
218static void test_cipher(char *algo, int enc, 455static void test_cipher(char *algo, int enc,
219 struct cipher_testvec *template, unsigned int tcount) 456 struct cipher_testvec *template, unsigned int tcount)
220{ 457{
@@ -237,15 +474,11 @@ static void test_cipher(char *algo, int enc,
237 printk("\ntesting %s %s\n", algo, e); 474 printk("\ntesting %s %s\n", algo, e);
238 475
239 tsize = sizeof (struct cipher_testvec); 476 tsize = sizeof (struct cipher_testvec);
240 tsize *= tcount;
241
242 if (tsize > TVMEMSIZE) { 477 if (tsize > TVMEMSIZE) {
243 printk("template (%u) too big for tvmem (%u)\n", tsize, 478 printk("template (%u) too big for tvmem (%u)\n", tsize,
244 TVMEMSIZE); 479 TVMEMSIZE);
245 return; 480 return;
246 } 481 }
247
248 memcpy(tvmem, template, tsize);
249 cipher_tv = (void *)tvmem; 482 cipher_tv = (void *)tvmem;
250 483
251 init_completion(&result.completion); 484 init_completion(&result.completion);
@@ -269,33 +502,34 @@ static void test_cipher(char *algo, int enc,
269 502
270 j = 0; 503 j = 0;
271 for (i = 0; i < tcount; i++) { 504 for (i = 0; i < tcount; i++) {
272 if (!(cipher_tv[i].np)) { 505 memcpy(cipher_tv, &template[i], tsize);
506 if (!(cipher_tv->np)) {
273 j++; 507 j++;
274 printk("test %u (%d bit key):\n", 508 printk("test %u (%d bit key):\n",
275 j, cipher_tv[i].klen * 8); 509 j, cipher_tv->klen * 8);
276 510
277 crypto_ablkcipher_clear_flags(tfm, ~0); 511 crypto_ablkcipher_clear_flags(tfm, ~0);
278 if (cipher_tv[i].wk) 512 if (cipher_tv->wk)
279 crypto_ablkcipher_set_flags( 513 crypto_ablkcipher_set_flags(
280 tfm, CRYPTO_TFM_REQ_WEAK_KEY); 514 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
281 key = cipher_tv[i].key; 515 key = cipher_tv->key;
282 516
283 ret = crypto_ablkcipher_setkey(tfm, key, 517 ret = crypto_ablkcipher_setkey(tfm, key,
284 cipher_tv[i].klen); 518 cipher_tv->klen);
285 if (ret) { 519 if (ret) {
286 printk("setkey() failed flags=%x\n", 520 printk("setkey() failed flags=%x\n",
287 crypto_ablkcipher_get_flags(tfm)); 521 crypto_ablkcipher_get_flags(tfm));
288 522
289 if (!cipher_tv[i].fail) 523 if (!cipher_tv->fail)
290 goto out; 524 goto out;
291 } 525 }
292 526
293 sg_init_one(&sg[0], cipher_tv[i].input, 527 sg_init_one(&sg[0], cipher_tv->input,
294 cipher_tv[i].ilen); 528 cipher_tv->ilen);
295 529
296 ablkcipher_request_set_crypt(req, sg, sg, 530 ablkcipher_request_set_crypt(req, sg, sg,
297 cipher_tv[i].ilen, 531 cipher_tv->ilen,
298 cipher_tv[i].iv); 532 cipher_tv->iv);
299 533
300 ret = enc ? 534 ret = enc ?
301 crypto_ablkcipher_encrypt(req) : 535 crypto_ablkcipher_encrypt(req) :
@@ -319,11 +553,11 @@ static void test_cipher(char *algo, int enc,
319 } 553 }
320 554
321 q = kmap(sg_page(&sg[0])) + sg[0].offset; 555 q = kmap(sg_page(&sg[0])) + sg[0].offset;
322 hexdump(q, cipher_tv[i].rlen); 556 hexdump(q, cipher_tv->rlen);
323 557
324 printk("%s\n", 558 printk("%s\n",
325 memcmp(q, cipher_tv[i].result, 559 memcmp(q, cipher_tv->result,
326 cipher_tv[i].rlen) ? "fail" : "pass"); 560 cipher_tv->rlen) ? "fail" : "pass");
327 } 561 }
328 } 562 }
329 563
@@ -332,41 +566,42 @@ static void test_cipher(char *algo, int enc,
332 566
333 j = 0; 567 j = 0;
334 for (i = 0; i < tcount; i++) { 568 for (i = 0; i < tcount; i++) {
335 if (cipher_tv[i].np) { 569 memcpy(cipher_tv, &template[i], tsize);
570 if (cipher_tv->np) {
336 j++; 571 j++;
337 printk("test %u (%d bit key):\n", 572 printk("test %u (%d bit key):\n",
338 j, cipher_tv[i].klen * 8); 573 j, cipher_tv->klen * 8);
339 574
340 crypto_ablkcipher_clear_flags(tfm, ~0); 575 crypto_ablkcipher_clear_flags(tfm, ~0);
341 if (cipher_tv[i].wk) 576 if (cipher_tv->wk)
342 crypto_ablkcipher_set_flags( 577 crypto_ablkcipher_set_flags(
343 tfm, CRYPTO_TFM_REQ_WEAK_KEY); 578 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
344 key = cipher_tv[i].key; 579 key = cipher_tv->key;
345 580
346 ret = crypto_ablkcipher_setkey(tfm, key, 581 ret = crypto_ablkcipher_setkey(tfm, key,
347 cipher_tv[i].klen); 582 cipher_tv->klen);
348 if (ret) { 583 if (ret) {
349 printk("setkey() failed flags=%x\n", 584 printk("setkey() failed flags=%x\n",
350 crypto_ablkcipher_get_flags(tfm)); 585 crypto_ablkcipher_get_flags(tfm));
351 586
352 if (!cipher_tv[i].fail) 587 if (!cipher_tv->fail)
353 goto out; 588 goto out;
354 } 589 }
355 590
356 temp = 0; 591 temp = 0;
357 sg_init_table(sg, cipher_tv[i].np); 592 sg_init_table(sg, cipher_tv->np);
358 for (k = 0; k < cipher_tv[i].np; k++) { 593 for (k = 0; k < cipher_tv->np; k++) {
359 memcpy(&xbuf[IDX[k]], 594 memcpy(&xbuf[IDX[k]],
360 cipher_tv[i].input + temp, 595 cipher_tv->input + temp,
361 cipher_tv[i].tap[k]); 596 cipher_tv->tap[k]);
362 temp += cipher_tv[i].tap[k]; 597 temp += cipher_tv->tap[k];
363 sg_set_buf(&sg[k], &xbuf[IDX[k]], 598 sg_set_buf(&sg[k], &xbuf[IDX[k]],
364 cipher_tv[i].tap[k]); 599 cipher_tv->tap[k]);
365 } 600 }
366 601
367 ablkcipher_request_set_crypt(req, sg, sg, 602 ablkcipher_request_set_crypt(req, sg, sg,
368 cipher_tv[i].ilen, 603 cipher_tv->ilen,
369 cipher_tv[i].iv); 604 cipher_tv->iv);
370 605
371 ret = enc ? 606 ret = enc ?
372 crypto_ablkcipher_encrypt(req) : 607 crypto_ablkcipher_encrypt(req) :
@@ -390,15 +625,15 @@ static void test_cipher(char *algo, int enc,
390 } 625 }
391 626
392 temp = 0; 627 temp = 0;
393 for (k = 0; k < cipher_tv[i].np; k++) { 628 for (k = 0; k < cipher_tv->np; k++) {
394 printk("page %u\n", k); 629 printk("page %u\n", k);
395 q = kmap(sg_page(&sg[k])) + sg[k].offset; 630 q = kmap(sg_page(&sg[k])) + sg[k].offset;
396 hexdump(q, cipher_tv[i].tap[k]); 631 hexdump(q, cipher_tv->tap[k]);
397 printk("%s\n", 632 printk("%s\n",
398 memcmp(q, cipher_tv[i].result + temp, 633 memcmp(q, cipher_tv->result + temp,
399 cipher_tv[i].tap[k]) ? "fail" : 634 cipher_tv->tap[k]) ? "fail" :
400 "pass"); 635 "pass");
401 temp += cipher_tv[i].tap[k]; 636 temp += cipher_tv->tap[k];
402 } 637 }
403 } 638 }
404 } 639 }
@@ -800,7 +1035,8 @@ out:
800 crypto_free_hash(tfm); 1035 crypto_free_hash(tfm);
801} 1036}
802 1037
803static void test_deflate(void) 1038static void test_comp(char *algo, struct comp_testvec *ctemplate,
1039 struct comp_testvec *dtemplate, int ctcount, int dtcount)
804{ 1040{
805 unsigned int i; 1041 unsigned int i;
806 char result[COMP_BUF_SIZE]; 1042 char result[COMP_BUF_SIZE];
@@ -808,25 +1044,26 @@ static void test_deflate(void)
808 struct comp_testvec *tv; 1044 struct comp_testvec *tv;
809 unsigned int tsize; 1045 unsigned int tsize;
810 1046
811 printk("\ntesting deflate compression\n"); 1047 printk("\ntesting %s compression\n", algo);
812 1048
813 tsize = sizeof (deflate_comp_tv_template); 1049 tsize = sizeof(struct comp_testvec);
1050 tsize *= ctcount;
814 if (tsize > TVMEMSIZE) { 1051 if (tsize > TVMEMSIZE) {
815 printk("template (%u) too big for tvmem (%u)\n", tsize, 1052 printk("template (%u) too big for tvmem (%u)\n", tsize,
816 TVMEMSIZE); 1053 TVMEMSIZE);
817 return; 1054 return;
818 } 1055 }
819 1056
820 memcpy(tvmem, deflate_comp_tv_template, tsize); 1057 memcpy(tvmem, ctemplate, tsize);
821 tv = (void *)tvmem; 1058 tv = (void *)tvmem;
822 1059
823 tfm = crypto_alloc_comp("deflate", 0, CRYPTO_ALG_ASYNC); 1060 tfm = crypto_alloc_comp(algo, 0, CRYPTO_ALG_ASYNC);
824 if (IS_ERR(tfm)) { 1061 if (IS_ERR(tfm)) {
825 printk("failed to load transform for deflate\n"); 1062 printk("failed to load transform for %s\n", algo);
826 return; 1063 return;
827 } 1064 }
828 1065
829 for (i = 0; i < DEFLATE_COMP_TEST_VECTORS; i++) { 1066 for (i = 0; i < ctcount; i++) {
830 int ilen, ret, dlen = COMP_BUF_SIZE; 1067 int ilen, ret, dlen = COMP_BUF_SIZE;
831 1068
832 printk("test %u:\n", i + 1); 1069 printk("test %u:\n", i + 1);
@@ -845,19 +1082,20 @@ static void test_deflate(void)
845 ilen, dlen); 1082 ilen, dlen);
846 } 1083 }
847 1084
848 printk("\ntesting deflate decompression\n"); 1085 printk("\ntesting %s decompression\n", algo);
849 1086
850 tsize = sizeof (deflate_decomp_tv_template); 1087 tsize = sizeof(struct comp_testvec);
1088 tsize *= dtcount;
851 if (tsize > TVMEMSIZE) { 1089 if (tsize > TVMEMSIZE) {
852 printk("template (%u) too big for tvmem (%u)\n", tsize, 1090 printk("template (%u) too big for tvmem (%u)\n", tsize,
853 TVMEMSIZE); 1091 TVMEMSIZE);
854 goto out; 1092 goto out;
855 } 1093 }
856 1094
857 memcpy(tvmem, deflate_decomp_tv_template, tsize); 1095 memcpy(tvmem, dtemplate, tsize);
858 tv = (void *)tvmem; 1096 tv = (void *)tvmem;
859 1097
860 for (i = 0; i < DEFLATE_DECOMP_TEST_VECTORS; i++) { 1098 for (i = 0; i < dtcount; i++) {
861 int ilen, ret, dlen = COMP_BUF_SIZE; 1099 int ilen, ret, dlen = COMP_BUF_SIZE;
862 1100
863 printk("test %u:\n", i + 1); 1101 printk("test %u:\n", i + 1);
@@ -918,6 +1156,8 @@ static void do_test(void)
918 1156
919 test_hash("md4", md4_tv_template, MD4_TEST_VECTORS); 1157 test_hash("md4", md4_tv_template, MD4_TEST_VECTORS);
920 1158
1159 test_hash("sha224", sha224_tv_template, SHA224_TEST_VECTORS);
1160
921 test_hash("sha256", sha256_tv_template, SHA256_TEST_VECTORS); 1161 test_hash("sha256", sha256_tv_template, SHA256_TEST_VECTORS);
922 1162
923 //BLOWFISH 1163 //BLOWFISH
@@ -969,6 +1209,18 @@ static void do_test(void)
969 AES_XTS_ENC_TEST_VECTORS); 1209 AES_XTS_ENC_TEST_VECTORS);
970 test_cipher("xts(aes)", DECRYPT, aes_xts_dec_tv_template, 1210 test_cipher("xts(aes)", DECRYPT, aes_xts_dec_tv_template,
971 AES_XTS_DEC_TEST_VECTORS); 1211 AES_XTS_DEC_TEST_VECTORS);
1212 test_cipher("rfc3686(ctr(aes))", ENCRYPT, aes_ctr_enc_tv_template,
1213 AES_CTR_ENC_TEST_VECTORS);
1214 test_cipher("rfc3686(ctr(aes))", DECRYPT, aes_ctr_dec_tv_template,
1215 AES_CTR_DEC_TEST_VECTORS);
1216 test_aead("gcm(aes)", ENCRYPT, aes_gcm_enc_tv_template,
1217 AES_GCM_ENC_TEST_VECTORS);
1218 test_aead("gcm(aes)", DECRYPT, aes_gcm_dec_tv_template,
1219 AES_GCM_DEC_TEST_VECTORS);
1220 test_aead("ccm(aes)", ENCRYPT, aes_ccm_enc_tv_template,
1221 AES_CCM_ENC_TEST_VECTORS);
1222 test_aead("ccm(aes)", DECRYPT, aes_ccm_dec_tv_template,
1223 AES_CCM_DEC_TEST_VECTORS);
972 1224
973 //CAST5 1225 //CAST5
974 test_cipher("ecb(cast5)", ENCRYPT, cast5_enc_tv_template, 1226 test_cipher("ecb(cast5)", ENCRYPT, cast5_enc_tv_template,
@@ -1057,12 +1309,18 @@ static void do_test(void)
1057 test_hash("tgr192", tgr192_tv_template, TGR192_TEST_VECTORS); 1309 test_hash("tgr192", tgr192_tv_template, TGR192_TEST_VECTORS);
1058 test_hash("tgr160", tgr160_tv_template, TGR160_TEST_VECTORS); 1310 test_hash("tgr160", tgr160_tv_template, TGR160_TEST_VECTORS);
1059 test_hash("tgr128", tgr128_tv_template, TGR128_TEST_VECTORS); 1311 test_hash("tgr128", tgr128_tv_template, TGR128_TEST_VECTORS);
1060 test_deflate(); 1312 test_comp("deflate", deflate_comp_tv_template,
1313 deflate_decomp_tv_template, DEFLATE_COMP_TEST_VECTORS,
1314 DEFLATE_DECOMP_TEST_VECTORS);
1315 test_comp("lzo", lzo_comp_tv_template, lzo_decomp_tv_template,
1316 LZO_COMP_TEST_VECTORS, LZO_DECOMP_TEST_VECTORS);
1061 test_hash("crc32c", crc32c_tv_template, CRC32C_TEST_VECTORS); 1317 test_hash("crc32c", crc32c_tv_template, CRC32C_TEST_VECTORS);
1062 test_hash("hmac(md5)", hmac_md5_tv_template, 1318 test_hash("hmac(md5)", hmac_md5_tv_template,
1063 HMAC_MD5_TEST_VECTORS); 1319 HMAC_MD5_TEST_VECTORS);
1064 test_hash("hmac(sha1)", hmac_sha1_tv_template, 1320 test_hash("hmac(sha1)", hmac_sha1_tv_template,
1065 HMAC_SHA1_TEST_VECTORS); 1321 HMAC_SHA1_TEST_VECTORS);
1322 test_hash("hmac(sha224)", hmac_sha224_tv_template,
1323 HMAC_SHA224_TEST_VECTORS);
1066 test_hash("hmac(sha256)", hmac_sha256_tv_template, 1324 test_hash("hmac(sha256)", hmac_sha256_tv_template,
1067 HMAC_SHA256_TEST_VECTORS); 1325 HMAC_SHA256_TEST_VECTORS);
1068 test_hash("hmac(sha384)", hmac_sha384_tv_template, 1326 test_hash("hmac(sha384)", hmac_sha384_tv_template,
@@ -1156,6 +1414,10 @@ static void do_test(void)
1156 AES_XTS_ENC_TEST_VECTORS); 1414 AES_XTS_ENC_TEST_VECTORS);
1157 test_cipher("xts(aes)", DECRYPT, aes_xts_dec_tv_template, 1415 test_cipher("xts(aes)", DECRYPT, aes_xts_dec_tv_template,
1158 AES_XTS_DEC_TEST_VECTORS); 1416 AES_XTS_DEC_TEST_VECTORS);
1417 test_cipher("rfc3686(ctr(aes))", ENCRYPT, aes_ctr_enc_tv_template,
1418 AES_CTR_ENC_TEST_VECTORS);
1419 test_cipher("rfc3686(ctr(aes))", DECRYPT, aes_ctr_dec_tv_template,
1420 AES_CTR_DEC_TEST_VECTORS);
1159 break; 1421 break;
1160 1422
1161 case 11: 1423 case 11:
@@ -1167,7 +1429,9 @@ static void do_test(void)
1167 break; 1429 break;
1168 1430
1169 case 13: 1431 case 13:
1170 test_deflate(); 1432 test_comp("deflate", deflate_comp_tv_template,
1433 deflate_decomp_tv_template, DEFLATE_COMP_TEST_VECTORS,
1434 DEFLATE_DECOMP_TEST_VECTORS);
1171 break; 1435 break;
1172 1436
1173 case 14: 1437 case 14:
@@ -1291,6 +1555,34 @@ static void do_test(void)
1291 camellia_cbc_dec_tv_template, 1555 camellia_cbc_dec_tv_template,
1292 CAMELLIA_CBC_DEC_TEST_VECTORS); 1556 CAMELLIA_CBC_DEC_TEST_VECTORS);
1293 break; 1557 break;
1558 case 33:
1559 test_hash("sha224", sha224_tv_template, SHA224_TEST_VECTORS);
1560 break;
1561
1562 case 34:
1563 test_cipher("salsa20", ENCRYPT,
1564 salsa20_stream_enc_tv_template,
1565 SALSA20_STREAM_ENC_TEST_VECTORS);
1566 break;
1567
1568 case 35:
1569 test_aead("gcm(aes)", ENCRYPT, aes_gcm_enc_tv_template,
1570 AES_GCM_ENC_TEST_VECTORS);
1571 test_aead("gcm(aes)", DECRYPT, aes_gcm_dec_tv_template,
1572 AES_GCM_DEC_TEST_VECTORS);
1573 break;
1574
1575 case 36:
1576 test_comp("lzo", lzo_comp_tv_template, lzo_decomp_tv_template,
1577 LZO_COMP_TEST_VECTORS, LZO_DECOMP_TEST_VECTORS);
1578 break;
1579
1580 case 37:
1581 test_aead("ccm(aes)", ENCRYPT, aes_ccm_enc_tv_template,
1582 AES_CCM_ENC_TEST_VECTORS);
1583 test_aead("ccm(aes)", DECRYPT, aes_ccm_dec_tv_template,
1584 AES_CCM_DEC_TEST_VECTORS);
1585 break;
1294 1586
1295 case 100: 1587 case 100:
1296 test_hash("hmac(md5)", hmac_md5_tv_template, 1588 test_hash("hmac(md5)", hmac_md5_tv_template,
@@ -1317,6 +1609,15 @@ static void do_test(void)
1317 HMAC_SHA512_TEST_VECTORS); 1609 HMAC_SHA512_TEST_VECTORS);
1318 break; 1610 break;
1319 1611
1612 case 105:
1613 test_hash("hmac(sha224)", hmac_sha224_tv_template,
1614 HMAC_SHA224_TEST_VECTORS);
1615 break;
1616
1617 case 106:
1618 test_hash("xcbc(aes)", aes_xcbc128_tv_template,
1619 XCBC_AES_TEST_VECTORS);
1620 break;
1320 1621
1321 case 200: 1622 case 200:
1322 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0, 1623 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
@@ -1400,6 +1701,11 @@ static void do_test(void)
1400 camellia_speed_template); 1701 camellia_speed_template);
1401 break; 1702 break;
1402 1703
1704 case 206:
1705 test_cipher_speed("salsa20", ENCRYPT, sec, NULL, 0,
1706 salsa20_speed_template);
1707 break;
1708
1403 case 300: 1709 case 300:
1404 /* fall through */ 1710 /* fall through */
1405 1711
@@ -1451,6 +1757,10 @@ static void do_test(void)
1451 test_hash_speed("tgr192", sec, generic_hash_speed_template); 1757 test_hash_speed("tgr192", sec, generic_hash_speed_template);
1452 if (mode > 300 && mode < 400) break; 1758 if (mode > 300 && mode < 400) break;
1453 1759
1760 case 313:
1761 test_hash_speed("sha224", sec, generic_hash_speed_template);
1762 if (mode > 300 && mode < 400) break;
1763
1454 case 399: 1764 case 399:
1455 break; 1765 break;
1456 1766
@@ -1467,20 +1777,21 @@ static void do_test(void)
1467 1777
1468static int __init init(void) 1778static int __init init(void)
1469{ 1779{
1780 int err = -ENOMEM;
1781
1470 tvmem = kmalloc(TVMEMSIZE, GFP_KERNEL); 1782 tvmem = kmalloc(TVMEMSIZE, GFP_KERNEL);
1471 if (tvmem == NULL) 1783 if (tvmem == NULL)
1472 return -ENOMEM; 1784 return err;
1473 1785
1474 xbuf = kmalloc(XBUFSIZE, GFP_KERNEL); 1786 xbuf = kmalloc(XBUFSIZE, GFP_KERNEL);
1475 if (xbuf == NULL) { 1787 if (xbuf == NULL)
1476 kfree(tvmem); 1788 goto err_free_tv;
1477 return -ENOMEM;
1478 }
1479 1789
1480 do_test(); 1790 axbuf = kmalloc(XBUFSIZE, GFP_KERNEL);
1791 if (axbuf == NULL)
1792 goto err_free_xbuf;
1481 1793
1482 kfree(xbuf); 1794 do_test();
1483 kfree(tvmem);
1484 1795
 1485 /* We intentionally return -EAGAIN to prevent keeping 1796 * the module. It does all its work from init()
1486 * the module. It does all its work from init() 1797 * the module. It does all its work from init()
@@ -1488,7 +1799,15 @@ static int __init init(void)
1488 * => we don't need it in the memory, do we? 1799 * => we don't need it in the memory, do we?
1489 * -- mludvig 1800 * -- mludvig
1490 */ 1801 */
1491 return -EAGAIN; 1802 err = -EAGAIN;
1803
1804 kfree(axbuf);
1805 err_free_xbuf:
1806 kfree(xbuf);
1807 err_free_tv:
1808 kfree(tvmem);
1809
1810 return err;
1492} 1811}
1493 1812
1494/* 1813/*
diff --git a/crypto/tcrypt.h b/crypto/tcrypt.h
index ec861388d9a0..f785e5618e11 100644
--- a/crypto/tcrypt.h
+++ b/crypto/tcrypt.h
@@ -6,12 +6,15 @@
6 * 6 *
7 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> 7 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
8 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org> 8 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
9 * Copyright (c) 2007 Nokia Siemens Networks
9 * 10 *
10 * This program is free software; you can redistribute it and/or modify it 11 * This program is free software; you can redistribute it and/or modify it
11 * under the terms of the GNU General Public License as published by the Free 12 * under the terms of the GNU General Public License as published by the Free
12 * Software Foundation; either version 2 of the License, or (at your option) 13 * Software Foundation; either version 2 of the License, or (at your option)
13 * any later version. 14 * any later version.
14 * 15 *
16 * 2007-11-13 Added GCM tests
17 * 2007-11-13 Added AEAD support
15 * 2006-12-07 Added SHA384 HMAC and SHA512 HMAC tests 18 * 2006-12-07 Added SHA384 HMAC and SHA512 HMAC tests
16 * 2004-08-09 Cipher speed tests by Reyk Floeter <reyk@vantronix.net> 19 * 2004-08-09 Cipher speed tests by Reyk Floeter <reyk@vantronix.net>
17 * 2003-09-14 Changes by Kartikey Mahendra Bhatt 20 * 2003-09-14 Changes by Kartikey Mahendra Bhatt
@@ -40,14 +43,32 @@ struct hash_testvec {
40struct cipher_testvec { 43struct cipher_testvec {
41 char key[MAX_KEYLEN] __attribute__ ((__aligned__(4))); 44 char key[MAX_KEYLEN] __attribute__ ((__aligned__(4)));
42 char iv[MAX_IVLEN]; 45 char iv[MAX_IVLEN];
46 char input[4100];
47 char result[4100];
48 unsigned char tap[MAX_TAP];
49 int np;
50 unsigned char fail;
51 unsigned char wk; /* weak key flag */
52 unsigned char klen;
53 unsigned short ilen;
54 unsigned short rlen;
55};
56
57struct aead_testvec {
58 char key[MAX_KEYLEN] __attribute__ ((__aligned__(4)));
59 char iv[MAX_IVLEN];
43 char input[512]; 60 char input[512];
61 char assoc[512];
44 char result[512]; 62 char result[512];
45 unsigned char tap[MAX_TAP]; 63 unsigned char tap[MAX_TAP];
64 unsigned char atap[MAX_TAP];
46 int np; 65 int np;
66 int anp;
47 unsigned char fail; 67 unsigned char fail;
48 unsigned char wk; /* weak key flag */ 68 unsigned char wk; /* weak key flag */
49 unsigned char klen; 69 unsigned char klen;
50 unsigned short ilen; 70 unsigned short ilen;
71 unsigned short alen;
51 unsigned short rlen; 72 unsigned short rlen;
52}; 73};
53 74
@@ -173,6 +194,33 @@ static struct hash_testvec sha1_tv_template[] = {
173 } 194 }
174}; 195};
175 196
197
198/*
 199 * SHA224 test vectors from FIPS PUB 180-2
200 */
201#define SHA224_TEST_VECTORS 2
202
203static struct hash_testvec sha224_tv_template[] = {
204 {
205 .plaintext = "abc",
206 .psize = 3,
207 .digest = { 0x23, 0x09, 0x7D, 0x22, 0x34, 0x05, 0xD8, 0x22,
208 0x86, 0x42, 0xA4, 0x77, 0xBD, 0xA2, 0x55, 0xB3,
209 0x2A, 0xAD, 0xBC, 0xE4, 0xBD, 0xA0, 0xB3, 0xF7,
210 0xE3, 0x6C, 0x9D, 0xA7},
211 }, {
212 .plaintext =
213 "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
214 .psize = 56,
215 .digest = { 0x75, 0x38, 0x8B, 0x16, 0x51, 0x27, 0x76, 0xCC,
216 0x5D, 0xBA, 0x5D, 0xA1, 0xFD, 0x89, 0x01, 0x50,
217 0xB0, 0xC6, 0x45, 0x5C, 0xB4, 0xF5, 0x8B, 0x19,
218 0x52, 0x52, 0x25, 0x25 },
219 .np = 2,
220 .tap = { 28, 28 }
221 }
222};
223
176/* 224/*
177 * SHA256 test vectors from from NIST 225 * SHA256 test vectors from from NIST
178 */ 226 */
@@ -817,6 +865,121 @@ static struct hash_testvec hmac_sha1_tv_template[] = {
817 }, 865 },
818}; 866};
819 867
868
869/*
870 * SHA224 HMAC test vectors from RFC4231
871 */
872#define HMAC_SHA224_TEST_VECTORS 4
873
874static struct hash_testvec hmac_sha224_tv_template[] = {
875 {
876 .key = { 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b,
877 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b, 0x0b,
878 0x0b, 0x0b, 0x0b, 0x0b },
879 .ksize = 20,
880 /* ("Hi There") */
881 .plaintext = { 0x48, 0x69, 0x20, 0x54, 0x68, 0x65, 0x72, 0x65 },
882 .psize = 8,
883 .digest = { 0x89, 0x6f, 0xb1, 0x12, 0x8a, 0xbb, 0xdf, 0x19,
884 0x68, 0x32, 0x10, 0x7c, 0xd4, 0x9d, 0xf3, 0x3f,
885 0x47, 0xb4, 0xb1, 0x16, 0x99, 0x12, 0xba, 0x4f,
886 0x53, 0x68, 0x4b, 0x22},
887 }, {
888 .key = { 0x4a, 0x65, 0x66, 0x65 }, /* ("Jefe") */
889 .ksize = 4,
890 /* ("what do ya want for nothing?") */
891 .plaintext = { 0x77, 0x68, 0x61, 0x74, 0x20, 0x64, 0x6f, 0x20,
892 0x79, 0x61, 0x20, 0x77, 0x61, 0x6e, 0x74, 0x20,
893 0x66, 0x6f, 0x72, 0x20, 0x6e, 0x6f, 0x74, 0x68,
894 0x69, 0x6e, 0x67, 0x3f },
895 .psize = 28,
896 .digest = { 0xa3, 0x0e, 0x01, 0x09, 0x8b, 0xc6, 0xdb, 0xbf,
897 0x45, 0x69, 0x0f, 0x3a, 0x7e, 0x9e, 0x6d, 0x0f,
898 0x8b, 0xbe, 0xa2, 0xa3, 0x9e, 0x61, 0x48, 0x00,
899 0x8f, 0xd0, 0x5e, 0x44 },
900 .np = 4,
901 .tap = { 7, 7, 7, 7 }
902 }, {
903 .key = { 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
904 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
905 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
906 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
907 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
908 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
909 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
910 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
911 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
912 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
913 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
914 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
915 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
916 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
917 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
918 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
919 0xaa, 0xaa, 0xaa },
920 .ksize = 131,
921 /* ("Test Using Larger Than Block-Size Key - Hash Key First") */
922 .plaintext = { 0x54, 0x65, 0x73, 0x74, 0x20, 0x55, 0x73, 0x69,
923 0x6e, 0x67, 0x20, 0x4c, 0x61, 0x72, 0x67, 0x65,
924 0x72, 0x20, 0x54, 0x68, 0x61, 0x6e, 0x20, 0x42,
925 0x6c, 0x6f, 0x63, 0x6b, 0x2d, 0x53, 0x69, 0x7a,
926 0x65, 0x20, 0x4b, 0x65, 0x79, 0x20, 0x2d, 0x20,
927 0x48, 0x61, 0x73, 0x68, 0x20, 0x4b, 0x65, 0x79,
928 0x20, 0x46, 0x69, 0x72, 0x73, 0x74 },
929 .psize = 54,
930 .digest = { 0x95, 0xe9, 0xa0, 0xdb, 0x96, 0x20, 0x95, 0xad,
931 0xae, 0xbe, 0x9b, 0x2d, 0x6f, 0x0d, 0xbc, 0xe2,
932 0xd4, 0x99, 0xf1, 0x12, 0xf2, 0xd2, 0xb7, 0x27,
933 0x3f, 0xa6, 0x87, 0x0e },
934 }, {
935 .key = { 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
936 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
937 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
938 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
939 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
940 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
941 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
942 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
943 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
944 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
945 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
946 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
947 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
948 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
949 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
950 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
951 0xaa, 0xaa, 0xaa },
952 .ksize = 131,
953 /* ("This is a test using a larger than block-size key and a")
954 (" larger than block-size data. The key needs to be")
955 (" hashed before being used by the HMAC algorithm.") */
956 .plaintext = { 0x54, 0x68, 0x69, 0x73, 0x20, 0x69, 0x73, 0x20,
957 0x61, 0x20, 0x74, 0x65, 0x73, 0x74, 0x20, 0x75,
958 0x73, 0x69, 0x6e, 0x67, 0x20, 0x61, 0x20, 0x6c,
959 0x61, 0x72, 0x67, 0x65, 0x72, 0x20, 0x74, 0x68,
960 0x61, 0x6e, 0x20, 0x62, 0x6c, 0x6f, 0x63, 0x6b,
961 0x2d, 0x73, 0x69, 0x7a, 0x65, 0x20, 0x6b, 0x65,
962 0x79, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x61, 0x20,
963 0x6c, 0x61, 0x72, 0x67, 0x65, 0x72, 0x20, 0x74,
964 0x68, 0x61, 0x6e, 0x20, 0x62, 0x6c, 0x6f, 0x63,
965 0x6b, 0x2d, 0x73, 0x69, 0x7a, 0x65, 0x20, 0x64,
966 0x61, 0x74, 0x61, 0x2e, 0x20, 0x54, 0x68, 0x65,
967 0x20, 0x6b, 0x65, 0x79, 0x20, 0x6e, 0x65, 0x65,
968 0x64, 0x73, 0x20, 0x74, 0x6f, 0x20, 0x62, 0x65,
969 0x20, 0x68, 0x61, 0x73, 0x68, 0x65, 0x64, 0x20,
970 0x62, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x20, 0x62,
971 0x65, 0x69, 0x6e, 0x67, 0x20, 0x75, 0x73, 0x65,
972 0x64, 0x20, 0x62, 0x79, 0x20, 0x74, 0x68, 0x65,
973 0x20, 0x48, 0x4d, 0x41, 0x43, 0x20, 0x61, 0x6c,
974 0x67, 0x6f, 0x72, 0x69, 0x74, 0x68, 0x6d, 0x2e },
975 .psize = 152,
976 .digest = { 0x3a, 0x85, 0x41, 0x66, 0xac, 0x5d, 0x9f, 0x02,
977 0x3f, 0x54, 0xd5, 0x17, 0xd0, 0xb3, 0x9d, 0xbd,
978 0x94, 0x67, 0x70, 0xdb, 0x9c, 0x2b, 0x95, 0xc9,
979 0xf6, 0xf5, 0x65, 0xd1 },
980 },
981};
982
820/* 983/*
821 * HMAC-SHA256 test vectors from 984 * HMAC-SHA256 test vectors from
822 * draft-ietf-ipsec-ciph-sha-256-01.txt 985 * draft-ietf-ipsec-ciph-sha-256-01.txt
@@ -2140,12 +2303,18 @@ static struct cipher_testvec cast6_dec_tv_template[] = {
2140 */ 2303 */
2141#define AES_ENC_TEST_VECTORS 3 2304#define AES_ENC_TEST_VECTORS 3
2142#define AES_DEC_TEST_VECTORS 3 2305#define AES_DEC_TEST_VECTORS 3
2143#define AES_CBC_ENC_TEST_VECTORS 2 2306#define AES_CBC_ENC_TEST_VECTORS 4
2144#define AES_CBC_DEC_TEST_VECTORS 2 2307#define AES_CBC_DEC_TEST_VECTORS 4
2145#define AES_LRW_ENC_TEST_VECTORS 8 2308#define AES_LRW_ENC_TEST_VECTORS 8
2146#define AES_LRW_DEC_TEST_VECTORS 8 2309#define AES_LRW_DEC_TEST_VECTORS 8
2147#define AES_XTS_ENC_TEST_VECTORS 4 2310#define AES_XTS_ENC_TEST_VECTORS 4
2148#define AES_XTS_DEC_TEST_VECTORS 4 2311#define AES_XTS_DEC_TEST_VECTORS 4
2312#define AES_CTR_ENC_TEST_VECTORS 7
2313#define AES_CTR_DEC_TEST_VECTORS 6
2314#define AES_GCM_ENC_TEST_VECTORS 9
2315#define AES_GCM_DEC_TEST_VECTORS 8
2316#define AES_CCM_ENC_TEST_VECTORS 7
2317#define AES_CCM_DEC_TEST_VECTORS 7
2149 2318
2150static struct cipher_testvec aes_enc_tv_template[] = { 2319static struct cipher_testvec aes_enc_tv_template[] = {
2151 { /* From FIPS-197 */ 2320 { /* From FIPS-197 */
@@ -2249,6 +2418,57 @@ static struct cipher_testvec aes_cbc_enc_tv_template[] = {
2249 0x75, 0x86, 0x60, 0x2d, 0x25, 0x3c, 0xff, 0xf9, 2418 0x75, 0x86, 0x60, 0x2d, 0x25, 0x3c, 0xff, 0xf9,
2250 0x1b, 0x82, 0x66, 0xbe, 0xa6, 0xd6, 0x1a, 0xb1 }, 2419 0x1b, 0x82, 0x66, 0xbe, 0xa6, 0xd6, 0x1a, 0xb1 },
2251 .rlen = 32, 2420 .rlen = 32,
2421 }, { /* From NIST SP800-38A */
2422 .key = { 0x8e, 0x73, 0xb0, 0xf7, 0xda, 0x0e, 0x64, 0x52,
2423 0xc8, 0x10, 0xf3, 0x2b, 0x80, 0x90, 0x79, 0xe5,
2424 0x62, 0xf8, 0xea, 0xd2, 0x52, 0x2c, 0x6b, 0x7b },
2425 .klen = 24,
2426 .iv = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
2427 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f },
2428 .input = { 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96,
2429 0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a,
2430 0xae, 0x2d, 0x8a, 0x57, 0x1e, 0x03, 0xac, 0x9c,
2431 0x9e, 0xb7, 0x6f, 0xac, 0x45, 0xaf, 0x8e, 0x51,
2432 0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11,
2433 0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef,
2434 0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17,
2435 0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 },
2436 .ilen = 64,
2437 .result = { 0x4f, 0x02, 0x1d, 0xb2, 0x43, 0xbc, 0x63, 0x3d,
2438 0x71, 0x78, 0x18, 0x3a, 0x9f, 0xa0, 0x71, 0xe8,
2439 0xb4, 0xd9, 0xad, 0xa9, 0xad, 0x7d, 0xed, 0xf4,
2440 0xe5, 0xe7, 0x38, 0x76, 0x3f, 0x69, 0x14, 0x5a,
2441 0x57, 0x1b, 0x24, 0x20, 0x12, 0xfb, 0x7a, 0xe0,
2442 0x7f, 0xa9, 0xba, 0xac, 0x3d, 0xf1, 0x02, 0xe0,
2443 0x08, 0xb0, 0xe2, 0x79, 0x88, 0x59, 0x88, 0x81,
2444 0xd9, 0x20, 0xa9, 0xe6, 0x4f, 0x56, 0x15, 0xcd },
2445 .rlen = 64,
2446 }, {
2447 .key = { 0x60, 0x3d, 0xeb, 0x10, 0x15, 0xca, 0x71, 0xbe,
2448 0x2b, 0x73, 0xae, 0xf0, 0x85, 0x7d, 0x77, 0x81,
2449 0x1f, 0x35, 0x2c, 0x07, 0x3b, 0x61, 0x08, 0xd7,
2450 0x2d, 0x98, 0x10, 0xa3, 0x09, 0x14, 0xdf, 0xf4 },
2451 .klen = 32,
2452 .iv = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
2453 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f },
2454 .input = { 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96,
2455 0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a,
2456 0xae, 0x2d, 0x8a, 0x57, 0x1e, 0x03, 0xac, 0x9c,
2457 0x9e, 0xb7, 0x6f, 0xac, 0x45, 0xaf, 0x8e, 0x51,
2458 0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11,
2459 0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef,
2460 0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17,
2461 0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 },
2462 .ilen = 64,
2463 .result = { 0xf5, 0x8c, 0x4c, 0x04, 0xd6, 0xe5, 0xf1, 0xba,
2464 0x77, 0x9e, 0xab, 0xfb, 0x5f, 0x7b, 0xfb, 0xd6,
2465 0x9c, 0xfc, 0x4e, 0x96, 0x7e, 0xdb, 0x80, 0x8d,
2466 0x67, 0x9f, 0x77, 0x7b, 0xc6, 0x70, 0x2c, 0x7d,
2467 0x39, 0xf2, 0x33, 0x69, 0xa9, 0xd9, 0xba, 0xcf,
2468 0xa5, 0x30, 0xe2, 0x63, 0x04, 0x23, 0x14, 0x61,
2469 0xb2, 0xeb, 0x05, 0xe2, 0xc3, 0x9b, 0xe9, 0xfc,
2470 0xda, 0x6c, 0x19, 0x07, 0x8c, 0x6a, 0x9d, 0x1b },
2471 .rlen = 64,
2252 }, 2472 },
2253}; 2473};
2254 2474
@@ -2280,6 +2500,57 @@ static struct cipher_testvec aes_cbc_dec_tv_template[] = {
2280 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 2500 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
2281 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f }, 2501 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f },
2282 .rlen = 32, 2502 .rlen = 32,
2503 }, { /* From NIST SP800-38A */
2504 .key = { 0x8e, 0x73, 0xb0, 0xf7, 0xda, 0x0e, 0x64, 0x52,
2505 0xc8, 0x10, 0xf3, 0x2b, 0x80, 0x90, 0x79, 0xe5,
2506 0x62, 0xf8, 0xea, 0xd2, 0x52, 0x2c, 0x6b, 0x7b },
2507 .klen = 24,
2508 .iv = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
2509 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f },
2510 .input = { 0x4f, 0x02, 0x1d, 0xb2, 0x43, 0xbc, 0x63, 0x3d,
2511 0x71, 0x78, 0x18, 0x3a, 0x9f, 0xa0, 0x71, 0xe8,
2512 0xb4, 0xd9, 0xad, 0xa9, 0xad, 0x7d, 0xed, 0xf4,
2513 0xe5, 0xe7, 0x38, 0x76, 0x3f, 0x69, 0x14, 0x5a,
2514 0x57, 0x1b, 0x24, 0x20, 0x12, 0xfb, 0x7a, 0xe0,
2515 0x7f, 0xa9, 0xba, 0xac, 0x3d, 0xf1, 0x02, 0xe0,
2516 0x08, 0xb0, 0xe2, 0x79, 0x88, 0x59, 0x88, 0x81,
2517 0xd9, 0x20, 0xa9, 0xe6, 0x4f, 0x56, 0x15, 0xcd },
2518 .ilen = 64,
2519 .result = { 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96,
2520 0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a,
2521 0xae, 0x2d, 0x8a, 0x57, 0x1e, 0x03, 0xac, 0x9c,
2522 0x9e, 0xb7, 0x6f, 0xac, 0x45, 0xaf, 0x8e, 0x51,
2523 0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11,
2524 0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef,
2525 0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17,
2526 0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 },
2527 .rlen = 64,
2528 }, {
2529 .key = { 0x60, 0x3d, 0xeb, 0x10, 0x15, 0xca, 0x71, 0xbe,
2530 0x2b, 0x73, 0xae, 0xf0, 0x85, 0x7d, 0x77, 0x81,
2531 0x1f, 0x35, 0x2c, 0x07, 0x3b, 0x61, 0x08, 0xd7,
2532 0x2d, 0x98, 0x10, 0xa3, 0x09, 0x14, 0xdf, 0xf4 },
2533 .klen = 32,
2534 .iv = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
2535 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f },
2536 .input = { 0xf5, 0x8c, 0x4c, 0x04, 0xd6, 0xe5, 0xf1, 0xba,
2537 0x77, 0x9e, 0xab, 0xfb, 0x5f, 0x7b, 0xfb, 0xd6,
2538 0x9c, 0xfc, 0x4e, 0x96, 0x7e, 0xdb, 0x80, 0x8d,
2539 0x67, 0x9f, 0x77, 0x7b, 0xc6, 0x70, 0x2c, 0x7d,
2540 0x39, 0xf2, 0x33, 0x69, 0xa9, 0xd9, 0xba, 0xcf,
2541 0xa5, 0x30, 0xe2, 0x63, 0x04, 0x23, 0x14, 0x61,
2542 0xb2, 0xeb, 0x05, 0xe2, 0xc3, 0x9b, 0xe9, 0xfc,
2543 0xda, 0x6c, 0x19, 0x07, 0x8c, 0x6a, 0x9d, 0x1b },
2544 .ilen = 64,
2545 .result = { 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96,
2546 0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a,
2547 0xae, 0x2d, 0x8a, 0x57, 0x1e, 0x03, 0xac, 0x9c,
2548 0x9e, 0xb7, 0x6f, 0xac, 0x45, 0xaf, 0x8e, 0x51,
2549 0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11,
2550 0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef,
2551 0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17,
2552 0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 },
2553 .rlen = 64,
2283 }, 2554 },
2284}; 2555};
2285 2556
@@ -3180,6 +3451,1843 @@ static struct cipher_testvec aes_xts_dec_tv_template[] = {
3180 } 3451 }
3181}; 3452};
3182 3453
3454
3455static struct cipher_testvec aes_ctr_enc_tv_template[] = {
3456 { /* From RFC 3686 */
3457 .key = { 0xae, 0x68, 0x52, 0xf8, 0x12, 0x10, 0x67, 0xcc,
3458 0x4b, 0xf7, 0xa5, 0x76, 0x55, 0x77, 0xf3, 0x9e,
3459 0x00, 0x00, 0x00, 0x30 },
3460 .klen = 20,
3461 .iv = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
3462 .input = { "Single block msg" },
3463 .ilen = 16,
3464 .result = { 0xe4, 0x09, 0x5d, 0x4f, 0xb7, 0xa7, 0xb3, 0x79,
3465 0x2d, 0x61, 0x75, 0xa3, 0x26, 0x13, 0x11, 0xb8 },
3466 .rlen = 16,
3467 }, {
3468 .key = { 0x7e, 0x24, 0x06, 0x78, 0x17, 0xfa, 0xe0, 0xd7,
3469 0x43, 0xd6, 0xce, 0x1f, 0x32, 0x53, 0x91, 0x63,
3470 0x00, 0x6c, 0xb6, 0xdb },
3471 .klen = 20,
3472 .iv = { 0xc0, 0x54, 0x3b, 0x59, 0xda, 0x48, 0xd9, 0x0b },
3473 .input = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
3474 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
3475 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
3476 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f },
3477 .ilen = 32,
3478 .result = { 0x51, 0x04, 0xa1, 0x06, 0x16, 0x8a, 0x72, 0xd9,
3479 0x79, 0x0d, 0x41, 0xee, 0x8e, 0xda, 0xd3, 0x88,
3480 0xeb, 0x2e, 0x1e, 0xfc, 0x46, 0xda, 0x57, 0xc8,
3481 0xfc, 0xe6, 0x30, 0xdf, 0x91, 0x41, 0xbe, 0x28 },
3482 .rlen = 32,
3483 }, {
3484 .key = { 0x16, 0xaf, 0x5b, 0x14, 0x5f, 0xc9, 0xf5, 0x79,
3485 0xc1, 0x75, 0xf9, 0x3e, 0x3b, 0xfb, 0x0e, 0xed,
3486 0x86, 0x3d, 0x06, 0xcc, 0xfd, 0xb7, 0x85, 0x15,
3487 0x00, 0x00, 0x00, 0x48 },
3488 .klen = 28,
3489 .iv = { 0x36, 0x73, 0x3c, 0x14, 0x7d, 0x6d, 0x93, 0xcb },
3490 .input = { "Single block msg" },
3491 .ilen = 16,
3492 .result = { 0x4b, 0x55, 0x38, 0x4f, 0xe2, 0x59, 0xc9, 0xc8,
3493 0x4e, 0x79, 0x35, 0xa0, 0x03, 0xcb, 0xe9, 0x28 },
3494 .rlen = 16,
3495 }, {
3496 .key = { 0x7c, 0x5c, 0xb2, 0x40, 0x1b, 0x3d, 0xc3, 0x3c,
3497 0x19, 0xe7, 0x34, 0x08, 0x19, 0xe0, 0xf6, 0x9c,
3498 0x67, 0x8c, 0x3d, 0xb8, 0xe6, 0xf6, 0xa9, 0x1a,
3499 0x00, 0x96, 0xb0, 0x3b },
3500 .klen = 28,
3501 .iv = { 0x02, 0x0c, 0x6e, 0xad, 0xc2, 0xcb, 0x50, 0x0d },
3502 .input = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
3503 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
3504 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
3505 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f },
3506 .ilen = 32,
3507 .result = { 0x45, 0x32, 0x43, 0xfc, 0x60, 0x9b, 0x23, 0x32,
3508 0x7e, 0xdf, 0xaa, 0xfa, 0x71, 0x31, 0xcd, 0x9f,
3509 0x84, 0x90, 0x70, 0x1c, 0x5a, 0xd4, 0xa7, 0x9c,
3510 0xfc, 0x1f, 0xe0, 0xff, 0x42, 0xf4, 0xfb, 0x00 },
3511 .rlen = 32,
3512 }, {
3513 .key = { 0x77, 0x6b, 0xef, 0xf2, 0x85, 0x1d, 0xb0, 0x6f,
3514 0x4c, 0x8a, 0x05, 0x42, 0xc8, 0x69, 0x6f, 0x6c,
3515 0x6a, 0x81, 0xaf, 0x1e, 0xec, 0x96, 0xb4, 0xd3,
3516 0x7f, 0xc1, 0xd6, 0x89, 0xe6, 0xc1, 0xc1, 0x04,
3517 0x00, 0x00, 0x00, 0x60 },
3518 .klen = 36,
3519 .iv = { 0xdb, 0x56, 0x72, 0xc9, 0x7a, 0xa8, 0xf0, 0xb2 },
3520 .input = { "Single block msg" },
3521 .ilen = 16,
3522 .result = { 0x14, 0x5a, 0xd0, 0x1d, 0xbf, 0x82, 0x4e, 0xc7,
3523 0x56, 0x08, 0x63, 0xdc, 0x71, 0xe3, 0xe0, 0xc0 },
3524 .rlen = 16,
3525 }, {
3526 .key = { 0xf6, 0xd6, 0x6d, 0x6b, 0xd5, 0x2d, 0x59, 0xbb,
3527 0x07, 0x96, 0x36, 0x58, 0x79, 0xef, 0xf8, 0x86,
3528 0xc6, 0x6d, 0xd5, 0x1a, 0x5b, 0x6a, 0x99, 0x74,
3529 0x4b, 0x50, 0x59, 0x0c, 0x87, 0xa2, 0x38, 0x84,
3530 0x00, 0xfa, 0xac, 0x24 },
3531 .klen = 36,
3532 .iv = { 0xc1, 0x58, 0x5e, 0xf1, 0x5a, 0x43, 0xd8, 0x75 },
3533 .input = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
3534 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
3535 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
3536 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f },
3537 .ilen = 32,
3538 .result = { 0xf0, 0x5e, 0x23, 0x1b, 0x38, 0x94, 0x61, 0x2c,
3539 0x49, 0xee, 0x00, 0x0b, 0x80, 0x4e, 0xb2, 0xa9,
3540 0xb8, 0x30, 0x6b, 0x50, 0x8f, 0x83, 0x9d, 0x6a,
3541 0x55, 0x30, 0x83, 0x1d, 0x93, 0x44, 0xaf, 0x1c },
3542 .rlen = 32,
3543 }, {
3544 // generated using Crypto++
3545 .key = {
3546 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
3547 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
3548 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
3549 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
3550 0x00, 0x00, 0x00, 0x00,
3551 },
3552 .klen = 32 + 4,
3553 .iv = {
3554 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3555 },
3556 .input = {
3557 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
3558 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
3559 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
3560 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
3561 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
3562 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
3563 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
3564 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f,
3565 0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
3566 0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f,
3567 0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57,
3568 0x58, 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f,
3569 0x60, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67,
3570 0x68, 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f,
3571 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77,
3572 0x78, 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e, 0x7f,
3573 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
3574 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
3575 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
3576 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,
3577 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
3578 0xa8, 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf,
3579 0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7,
3580 0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf,
3581 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
3582 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf,
3583 0xd0, 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7,
3584 0xd8, 0xd9, 0xda, 0xdb, 0xdc, 0xdd, 0xde, 0xdf,
3585 0xe0, 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7,
3586 0xe8, 0xe9, 0xea, 0xeb, 0xec, 0xed, 0xee, 0xef,
3587 0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7,
3588 0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe, 0xff,
3589 0x00, 0x03, 0x06, 0x09, 0x0c, 0x0f, 0x12, 0x15,
3590 0x18, 0x1b, 0x1e, 0x21, 0x24, 0x27, 0x2a, 0x2d,
3591 0x30, 0x33, 0x36, 0x39, 0x3c, 0x3f, 0x42, 0x45,
3592 0x48, 0x4b, 0x4e, 0x51, 0x54, 0x57, 0x5a, 0x5d,
3593 0x60, 0x63, 0x66, 0x69, 0x6c, 0x6f, 0x72, 0x75,
3594 0x78, 0x7b, 0x7e, 0x81, 0x84, 0x87, 0x8a, 0x8d,
3595 0x90, 0x93, 0x96, 0x99, 0x9c, 0x9f, 0xa2, 0xa5,
3596 0xa8, 0xab, 0xae, 0xb1, 0xb4, 0xb7, 0xba, 0xbd,
3597 0xc0, 0xc3, 0xc6, 0xc9, 0xcc, 0xcf, 0xd2, 0xd5,
3598 0xd8, 0xdb, 0xde, 0xe1, 0xe4, 0xe7, 0xea, 0xed,
3599 0xf0, 0xf3, 0xf6, 0xf9, 0xfc, 0xff, 0x02, 0x05,
3600 0x08, 0x0b, 0x0e, 0x11, 0x14, 0x17, 0x1a, 0x1d,
3601 0x20, 0x23, 0x26, 0x29, 0x2c, 0x2f, 0x32, 0x35,
3602 0x38, 0x3b, 0x3e, 0x41, 0x44, 0x47, 0x4a, 0x4d,
3603 0x50, 0x53, 0x56, 0x59, 0x5c, 0x5f, 0x62, 0x65,
3604 0x68, 0x6b, 0x6e, 0x71, 0x74, 0x77, 0x7a, 0x7d,
3605 0x80, 0x83, 0x86, 0x89, 0x8c, 0x8f, 0x92, 0x95,
3606 0x98, 0x9b, 0x9e, 0xa1, 0xa4, 0xa7, 0xaa, 0xad,
3607 0xb0, 0xb3, 0xb6, 0xb9, 0xbc, 0xbf, 0xc2, 0xc5,
3608 0xc8, 0xcb, 0xce, 0xd1, 0xd4, 0xd7, 0xda, 0xdd,
3609 0xe0, 0xe3, 0xe6, 0xe9, 0xec, 0xef, 0xf2, 0xf5,
3610 0xf8, 0xfb, 0xfe, 0x01, 0x04, 0x07, 0x0a, 0x0d,
3611 0x10, 0x13, 0x16, 0x19, 0x1c, 0x1f, 0x22, 0x25,
3612 0x28, 0x2b, 0x2e, 0x31, 0x34, 0x37, 0x3a, 0x3d,
3613 0x40, 0x43, 0x46, 0x49, 0x4c, 0x4f, 0x52, 0x55,
3614 0x58, 0x5b, 0x5e, 0x61, 0x64, 0x67, 0x6a, 0x6d,
3615 0x70, 0x73, 0x76, 0x79, 0x7c, 0x7f, 0x82, 0x85,
3616 0x88, 0x8b, 0x8e, 0x91, 0x94, 0x97, 0x9a, 0x9d,
3617 0xa0, 0xa3, 0xa6, 0xa9, 0xac, 0xaf, 0xb2, 0xb5,
3618 0xb8, 0xbb, 0xbe, 0xc1, 0xc4, 0xc7, 0xca, 0xcd,
3619 0xd0, 0xd3, 0xd6, 0xd9, 0xdc, 0xdf, 0xe2, 0xe5,
3620 0xe8, 0xeb, 0xee, 0xf1, 0xf4, 0xf7, 0xfa, 0xfd,
3621 0x00, 0x05, 0x0a, 0x0f, 0x14, 0x19, 0x1e, 0x23,
3622 0x28, 0x2d, 0x32, 0x37, 0x3c, 0x41, 0x46, 0x4b,
3623 0x50, 0x55, 0x5a, 0x5f, 0x64, 0x69, 0x6e, 0x73,
3624 0x78, 0x7d, 0x82, 0x87, 0x8c, 0x91, 0x96, 0x9b,
3625 0xa0, 0xa5, 0xaa, 0xaf, 0xb4, 0xb9, 0xbe, 0xc3,
3626 0xc8, 0xcd, 0xd2, 0xd7, 0xdc, 0xe1, 0xe6, 0xeb,
3627 0xf0, 0xf5, 0xfa, 0xff, 0x04, 0x09, 0x0e, 0x13,
3628 0x18, 0x1d, 0x22, 0x27, 0x2c, 0x31, 0x36, 0x3b,
3629 0x40, 0x45, 0x4a, 0x4f, 0x54, 0x59, 0x5e, 0x63,
3630 0x68, 0x6d, 0x72, 0x77, 0x7c, 0x81, 0x86, 0x8b,
3631 0x90, 0x95, 0x9a, 0x9f, 0xa4, 0xa9, 0xae, 0xb3,
3632 0xb8, 0xbd, 0xc2, 0xc7, 0xcc, 0xd1, 0xd6, 0xdb,
3633 0xe0, 0xe5, 0xea, 0xef, 0xf4, 0xf9, 0xfe, 0x03,
3634 0x08, 0x0d, 0x12, 0x17, 0x1c, 0x21, 0x26, 0x2b,
3635 0x30, 0x35, 0x3a, 0x3f, 0x44, 0x49, 0x4e, 0x53,
3636 0x58, 0x5d, 0x62, 0x67, 0x6c, 0x71, 0x76, 0x7b,
3637 0x80, 0x85, 0x8a, 0x8f, 0x94, 0x99, 0x9e, 0xa3,
3638 0xa8, 0xad, 0xb2, 0xb7, 0xbc, 0xc1, 0xc6, 0xcb,
3639 0xd0, 0xd5, 0xda, 0xdf, 0xe4, 0xe9, 0xee, 0xf3,
3640 0xf8, 0xfd, 0x02, 0x07, 0x0c, 0x11, 0x16, 0x1b,
3641 0x20, 0x25, 0x2a, 0x2f, 0x34, 0x39, 0x3e, 0x43,
3642 0x48, 0x4d, 0x52, 0x57, 0x5c, 0x61, 0x66, 0x6b,
3643 0x70, 0x75, 0x7a, 0x7f, 0x84, 0x89, 0x8e, 0x93,
3644 0x98, 0x9d, 0xa2, 0xa7, 0xac, 0xb1, 0xb6, 0xbb,
3645 0xc0, 0xc5, 0xca, 0xcf, 0xd4, 0xd9, 0xde, 0xe3,
3646 0xe8, 0xed, 0xf2, 0xf7, 0xfc, 0x01, 0x06, 0x0b,
3647 0x10, 0x15, 0x1a, 0x1f, 0x24, 0x29, 0x2e, 0x33,
3648 0x38, 0x3d, 0x42, 0x47, 0x4c, 0x51, 0x56, 0x5b,
3649 0x60, 0x65, 0x6a, 0x6f, 0x74, 0x79, 0x7e, 0x83,
3650 0x88, 0x8d, 0x92, 0x97, 0x9c, 0xa1, 0xa6, 0xab,
3651 0xb0, 0xb5, 0xba, 0xbf, 0xc4, 0xc9, 0xce, 0xd3,
3652 0xd8, 0xdd, 0xe2, 0xe7, 0xec, 0xf1, 0xf6, 0xfb,
3653 0x00, 0x07, 0x0e, 0x15, 0x1c, 0x23, 0x2a, 0x31,
3654 0x38, 0x3f, 0x46, 0x4d, 0x54, 0x5b, 0x62, 0x69,
3655 0x70, 0x77, 0x7e, 0x85, 0x8c, 0x93, 0x9a, 0xa1,
3656 0xa8, 0xaf, 0xb6, 0xbd, 0xc4, 0xcb, 0xd2, 0xd9,
3657 0xe0, 0xe7, 0xee, 0xf5, 0xfc, 0x03, 0x0a, 0x11,
3658 0x18, 0x1f, 0x26, 0x2d, 0x34, 0x3b, 0x42, 0x49,
3659 0x50, 0x57, 0x5e, 0x65, 0x6c, 0x73, 0x7a, 0x81,
3660 0x88, 0x8f, 0x96, 0x9d, 0xa4, 0xab, 0xb2, 0xb9,
3661 0xc0, 0xc7, 0xce, 0xd5, 0xdc, 0xe3, 0xea, 0xf1,
3662 0xf8, 0xff, 0x06, 0x0d, 0x14, 0x1b, 0x22, 0x29,
3663 0x30, 0x37, 0x3e, 0x45, 0x4c, 0x53, 0x5a, 0x61,
3664 0x68, 0x6f, 0x76, 0x7d, 0x84, 0x8b, 0x92, 0x99,
3665 0xa0, 0xa7, 0xae, 0xb5, 0xbc, 0xc3, 0xca, 0xd1,
3666 0xd8, 0xdf, 0xe6, 0xed, 0xf4, 0xfb, 0x02, 0x09,
3667 0x10, 0x17, 0x1e, 0x25, 0x2c, 0x33, 0x3a, 0x41,
3668 0x48, 0x4f, 0x56, 0x5d, 0x64, 0x6b, 0x72, 0x79,
3669 0x80, 0x87, 0x8e, 0x95, 0x9c, 0xa3, 0xaa, 0xb1,
3670 0xb8, 0xbf, 0xc6, 0xcd, 0xd4, 0xdb, 0xe2, 0xe9,
3671 0xf0, 0xf7, 0xfe, 0x05, 0x0c, 0x13, 0x1a, 0x21,
3672 0x28, 0x2f, 0x36, 0x3d, 0x44, 0x4b, 0x52, 0x59,
3673 0x60, 0x67, 0x6e, 0x75, 0x7c, 0x83, 0x8a, 0x91,
3674 0x98, 0x9f, 0xa6, 0xad, 0xb4, 0xbb, 0xc2, 0xc9,
3675 0xd0, 0xd7, 0xde, 0xe5, 0xec, 0xf3, 0xfa, 0x01,
3676 0x08, 0x0f, 0x16, 0x1d, 0x24, 0x2b, 0x32, 0x39,
3677 0x40, 0x47, 0x4e, 0x55, 0x5c, 0x63, 0x6a, 0x71,
3678 0x78, 0x7f, 0x86, 0x8d, 0x94, 0x9b, 0xa2, 0xa9,
3679 0xb0, 0xb7, 0xbe, 0xc5, 0xcc, 0xd3, 0xda, 0xe1,
3680 0xe8, 0xef, 0xf6, 0xfd, 0x04, 0x0b, 0x12, 0x19,
3681 0x20, 0x27, 0x2e, 0x35, 0x3c, 0x43, 0x4a, 0x51,
3682 0x58, 0x5f, 0x66, 0x6d, 0x74, 0x7b, 0x82, 0x89,
3683 0x90, 0x97, 0x9e, 0xa5, 0xac, 0xb3, 0xba, 0xc1,
3684 0xc8, 0xcf, 0xd6, 0xdd, 0xe4, 0xeb, 0xf2, 0xf9,
3685 0x00, 0x09, 0x12, 0x1b, 0x24, 0x2d, 0x36, 0x3f,
3686 0x48, 0x51, 0x5a, 0x63, 0x6c, 0x75, 0x7e, 0x87,
3687 0x90, 0x99, 0xa2, 0xab, 0xb4, 0xbd, 0xc6, 0xcf,
3688 0xd8, 0xe1, 0xea, 0xf3, 0xfc, 0x05, 0x0e, 0x17,
3689 0x20, 0x29, 0x32, 0x3b, 0x44, 0x4d, 0x56, 0x5f,
3690 0x68, 0x71, 0x7a, 0x83, 0x8c, 0x95, 0x9e, 0xa7,
3691 0xb0, 0xb9, 0xc2, 0xcb, 0xd4, 0xdd, 0xe6, 0xef,
3692 0xf8, 0x01, 0x0a, 0x13, 0x1c, 0x25, 0x2e, 0x37,
3693 0x40, 0x49, 0x52, 0x5b, 0x64, 0x6d, 0x76, 0x7f,
3694 0x88, 0x91, 0x9a, 0xa3, 0xac, 0xb5, 0xbe, 0xc7,
3695 0xd0, 0xd9, 0xe2, 0xeb, 0xf4, 0xfd, 0x06, 0x0f,
3696 0x18, 0x21, 0x2a, 0x33, 0x3c, 0x45, 0x4e, 0x57,
3697 0x60, 0x69, 0x72, 0x7b, 0x84, 0x8d, 0x96, 0x9f,
3698 0xa8, 0xb1, 0xba, 0xc3, 0xcc, 0xd5, 0xde, 0xe7,
3699 0xf0, 0xf9, 0x02, 0x0b, 0x14, 0x1d, 0x26, 0x2f,
3700 0x38, 0x41, 0x4a, 0x53, 0x5c, 0x65, 0x6e, 0x77,
3701 0x80, 0x89, 0x92, 0x9b, 0xa4, 0xad, 0xb6, 0xbf,
3702 0xc8, 0xd1, 0xda, 0xe3, 0xec, 0xf5, 0xfe, 0x07,
3703 0x10, 0x19, 0x22, 0x2b, 0x34, 0x3d, 0x46, 0x4f,
3704 0x58, 0x61, 0x6a, 0x73, 0x7c, 0x85, 0x8e, 0x97,
3705 0xa0, 0xa9, 0xb2, 0xbb, 0xc4, 0xcd, 0xd6, 0xdf,
3706 0xe8, 0xf1, 0xfa, 0x03, 0x0c, 0x15, 0x1e, 0x27,
3707 0x30, 0x39, 0x42, 0x4b, 0x54, 0x5d, 0x66, 0x6f,
3708 0x78, 0x81, 0x8a, 0x93, 0x9c, 0xa5, 0xae, 0xb7,
3709 0xc0, 0xc9, 0xd2, 0xdb, 0xe4, 0xed, 0xf6, 0xff,
3710 0x08, 0x11, 0x1a, 0x23, 0x2c, 0x35, 0x3e, 0x47,
3711 0x50, 0x59, 0x62, 0x6b, 0x74, 0x7d, 0x86, 0x8f,
3712 0x98, 0xa1, 0xaa, 0xb3, 0xbc, 0xc5, 0xce, 0xd7,
3713 0xe0, 0xe9, 0xf2, 0xfb, 0x04, 0x0d, 0x16, 0x1f,
3714 0x28, 0x31, 0x3a, 0x43, 0x4c, 0x55, 0x5e, 0x67,
3715 0x70, 0x79, 0x82, 0x8b, 0x94, 0x9d, 0xa6, 0xaf,
3716 0xb8, 0xc1, 0xca, 0xd3, 0xdc, 0xe5, 0xee, 0xf7,
3717 0x00, 0x0b, 0x16, 0x21, 0x2c, 0x37, 0x42, 0x4d,
3718 0x58, 0x63, 0x6e, 0x79, 0x84, 0x8f, 0x9a, 0xa5,
3719 0xb0, 0xbb, 0xc6, 0xd1, 0xdc, 0xe7, 0xf2, 0xfd,
3720 0x08, 0x13, 0x1e, 0x29, 0x34, 0x3f, 0x4a, 0x55,
3721 0x60, 0x6b, 0x76, 0x81, 0x8c, 0x97, 0xa2, 0xad,
3722 0xb8, 0xc3, 0xce, 0xd9, 0xe4, 0xef, 0xfa, 0x05,
3723 0x10, 0x1b, 0x26, 0x31, 0x3c, 0x47, 0x52, 0x5d,
3724 0x68, 0x73, 0x7e, 0x89, 0x94, 0x9f, 0xaa, 0xb5,
3725 0xc0, 0xcb, 0xd6, 0xe1, 0xec, 0xf7, 0x02, 0x0d,
3726 0x18, 0x23, 0x2e, 0x39, 0x44, 0x4f, 0x5a, 0x65,
3727 0x70, 0x7b, 0x86, 0x91, 0x9c, 0xa7, 0xb2, 0xbd,
3728 0xc8, 0xd3, 0xde, 0xe9, 0xf4, 0xff, 0x0a, 0x15,
3729 0x20, 0x2b, 0x36, 0x41, 0x4c, 0x57, 0x62, 0x6d,
3730 0x78, 0x83, 0x8e, 0x99, 0xa4, 0xaf, 0xba, 0xc5,
3731 0xd0, 0xdb, 0xe6, 0xf1, 0xfc, 0x07, 0x12, 0x1d,
3732 0x28, 0x33, 0x3e, 0x49, 0x54, 0x5f, 0x6a, 0x75,
3733 0x80, 0x8b, 0x96, 0xa1, 0xac, 0xb7, 0xc2, 0xcd,
3734 0xd8, 0xe3, 0xee, 0xf9, 0x04, 0x0f, 0x1a, 0x25,
3735 0x30, 0x3b, 0x46, 0x51, 0x5c, 0x67, 0x72, 0x7d,
3736 0x88, 0x93, 0x9e, 0xa9, 0xb4, 0xbf, 0xca, 0xd5,
3737 0xe0, 0xeb, 0xf6, 0x01, 0x0c, 0x17, 0x22, 0x2d,
3738 0x38, 0x43, 0x4e, 0x59, 0x64, 0x6f, 0x7a, 0x85,
3739 0x90, 0x9b, 0xa6, 0xb1, 0xbc, 0xc7, 0xd2, 0xdd,
3740 0xe8, 0xf3, 0xfe, 0x09, 0x14, 0x1f, 0x2a, 0x35,
3741 0x40, 0x4b, 0x56, 0x61, 0x6c, 0x77, 0x82, 0x8d,
3742 0x98, 0xa3, 0xae, 0xb9, 0xc4, 0xcf, 0xda, 0xe5,
3743 0xf0, 0xfb, 0x06, 0x11, 0x1c, 0x27, 0x32, 0x3d,
3744 0x48, 0x53, 0x5e, 0x69, 0x74, 0x7f, 0x8a, 0x95,
3745 0xa0, 0xab, 0xb6, 0xc1, 0xcc, 0xd7, 0xe2, 0xed,
3746 0xf8, 0x03, 0x0e, 0x19, 0x24, 0x2f, 0x3a, 0x45,
3747 0x50, 0x5b, 0x66, 0x71, 0x7c, 0x87, 0x92, 0x9d,
3748 0xa8, 0xb3, 0xbe, 0xc9, 0xd4, 0xdf, 0xea, 0xf5,
3749 0x00, 0x0d, 0x1a, 0x27, 0x34, 0x41, 0x4e, 0x5b,
3750 0x68, 0x75, 0x82, 0x8f, 0x9c, 0xa9, 0xb6, 0xc3,
3751 0xd0, 0xdd, 0xea, 0xf7, 0x04, 0x11, 0x1e, 0x2b,
3752 0x38, 0x45, 0x52, 0x5f, 0x6c, 0x79, 0x86, 0x93,
3753 0xa0, 0xad, 0xba, 0xc7, 0xd4, 0xe1, 0xee, 0xfb,
3754 0x08, 0x15, 0x22, 0x2f, 0x3c, 0x49, 0x56, 0x63,
3755 0x70, 0x7d, 0x8a, 0x97, 0xa4, 0xb1, 0xbe, 0xcb,
3756 0xd8, 0xe5, 0xf2, 0xff, 0x0c, 0x19, 0x26, 0x33,
3757 0x40, 0x4d, 0x5a, 0x67, 0x74, 0x81, 0x8e, 0x9b,
3758 0xa8, 0xb5, 0xc2, 0xcf, 0xdc, 0xe9, 0xf6, 0x03,
3759 0x10, 0x1d, 0x2a, 0x37, 0x44, 0x51, 0x5e, 0x6b,
3760 0x78, 0x85, 0x92, 0x9f, 0xac, 0xb9, 0xc6, 0xd3,
3761 0xe0, 0xed, 0xfa, 0x07, 0x14, 0x21, 0x2e, 0x3b,
3762 0x48, 0x55, 0x62, 0x6f, 0x7c, 0x89, 0x96, 0xa3,
3763 0xb0, 0xbd, 0xca, 0xd7, 0xe4, 0xf1, 0xfe, 0x0b,
3764 0x18, 0x25, 0x32, 0x3f, 0x4c, 0x59, 0x66, 0x73,
3765 0x80, 0x8d, 0x9a, 0xa7, 0xb4, 0xc1, 0xce, 0xdb,
3766 0xe8, 0xf5, 0x02, 0x0f, 0x1c, 0x29, 0x36, 0x43,
3767 0x50, 0x5d, 0x6a, 0x77, 0x84, 0x91, 0x9e, 0xab,
3768 0xb8, 0xc5, 0xd2, 0xdf, 0xec, 0xf9, 0x06, 0x13,
3769 0x20, 0x2d, 0x3a, 0x47, 0x54, 0x61, 0x6e, 0x7b,
3770 0x88, 0x95, 0xa2, 0xaf, 0xbc, 0xc9, 0xd6, 0xe3,
3771 0xf0, 0xfd, 0x0a, 0x17, 0x24, 0x31, 0x3e, 0x4b,
3772 0x58, 0x65, 0x72, 0x7f, 0x8c, 0x99, 0xa6, 0xb3,
3773 0xc0, 0xcd, 0xda, 0xe7, 0xf4, 0x01, 0x0e, 0x1b,
3774 0x28, 0x35, 0x42, 0x4f, 0x5c, 0x69, 0x76, 0x83,
3775 0x90, 0x9d, 0xaa, 0xb7, 0xc4, 0xd1, 0xde, 0xeb,
3776 0xf8, 0x05, 0x12, 0x1f, 0x2c, 0x39, 0x46, 0x53,
3777 0x60, 0x6d, 0x7a, 0x87, 0x94, 0xa1, 0xae, 0xbb,
3778 0xc8, 0xd5, 0xe2, 0xef, 0xfc, 0x09, 0x16, 0x23,
3779 0x30, 0x3d, 0x4a, 0x57, 0x64, 0x71, 0x7e, 0x8b,
3780 0x98, 0xa5, 0xb2, 0xbf, 0xcc, 0xd9, 0xe6, 0xf3,
3781 0x00, 0x0f, 0x1e, 0x2d, 0x3c, 0x4b, 0x5a, 0x69,
3782 0x78, 0x87, 0x96, 0xa5, 0xb4, 0xc3, 0xd2, 0xe1,
3783 0xf0, 0xff, 0x0e, 0x1d, 0x2c, 0x3b, 0x4a, 0x59,
3784 0x68, 0x77, 0x86, 0x95, 0xa4, 0xb3, 0xc2, 0xd1,
3785 0xe0, 0xef, 0xfe, 0x0d, 0x1c, 0x2b, 0x3a, 0x49,
3786 0x58, 0x67, 0x76, 0x85, 0x94, 0xa3, 0xb2, 0xc1,
3787 0xd0, 0xdf, 0xee, 0xfd, 0x0c, 0x1b, 0x2a, 0x39,
3788 0x48, 0x57, 0x66, 0x75, 0x84, 0x93, 0xa2, 0xb1,
3789 0xc0, 0xcf, 0xde, 0xed, 0xfc, 0x0b, 0x1a, 0x29,
3790 0x38, 0x47, 0x56, 0x65, 0x74, 0x83, 0x92, 0xa1,
3791 0xb0, 0xbf, 0xce, 0xdd, 0xec, 0xfb, 0x0a, 0x19,
3792 0x28, 0x37, 0x46, 0x55, 0x64, 0x73, 0x82, 0x91,
3793 0xa0, 0xaf, 0xbe, 0xcd, 0xdc, 0xeb, 0xfa, 0x09,
3794 0x18, 0x27, 0x36, 0x45, 0x54, 0x63, 0x72, 0x81,
3795 0x90, 0x9f, 0xae, 0xbd, 0xcc, 0xdb, 0xea, 0xf9,
3796 0x08, 0x17, 0x26, 0x35, 0x44, 0x53, 0x62, 0x71,
3797 0x80, 0x8f, 0x9e, 0xad, 0xbc, 0xcb, 0xda, 0xe9,
3798 0xf8, 0x07, 0x16, 0x25, 0x34, 0x43, 0x52, 0x61,
3799 0x70, 0x7f, 0x8e, 0x9d, 0xac, 0xbb, 0xca, 0xd9,
3800 0xe8, 0xf7, 0x06, 0x15, 0x24, 0x33, 0x42, 0x51,
3801 0x60, 0x6f, 0x7e, 0x8d, 0x9c, 0xab, 0xba, 0xc9,
3802 0xd8, 0xe7, 0xf6, 0x05, 0x14, 0x23, 0x32, 0x41,
3803 0x50, 0x5f, 0x6e, 0x7d, 0x8c, 0x9b, 0xaa, 0xb9,
3804 0xc8, 0xd7, 0xe6, 0xf5, 0x04, 0x13, 0x22, 0x31,
3805 0x40, 0x4f, 0x5e, 0x6d, 0x7c, 0x8b, 0x9a, 0xa9,
3806 0xb8, 0xc7, 0xd6, 0xe5, 0xf4, 0x03, 0x12, 0x21,
3807 0x30, 0x3f, 0x4e, 0x5d, 0x6c, 0x7b, 0x8a, 0x99,
3808 0xa8, 0xb7, 0xc6, 0xd5, 0xe4, 0xf3, 0x02, 0x11,
3809 0x20, 0x2f, 0x3e, 0x4d, 0x5c, 0x6b, 0x7a, 0x89,
3810 0x98, 0xa7, 0xb6, 0xc5, 0xd4, 0xe3, 0xf2, 0x01,
3811 0x10, 0x1f, 0x2e, 0x3d, 0x4c, 0x5b, 0x6a, 0x79,
3812 0x88, 0x97, 0xa6, 0xb5, 0xc4, 0xd3, 0xe2, 0xf1,
3813 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77,
3814 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff,
3815 0x10, 0x21, 0x32, 0x43, 0x54, 0x65, 0x76, 0x87,
3816 0x98, 0xa9, 0xba, 0xcb, 0xdc, 0xed, 0xfe, 0x0f,
3817 0x20, 0x31, 0x42, 0x53, 0x64, 0x75, 0x86, 0x97,
3818 0xa8, 0xb9, 0xca, 0xdb, 0xec, 0xfd, 0x0e, 0x1f,
3819 0x30, 0x41, 0x52, 0x63, 0x74, 0x85, 0x96, 0xa7,
3820 0xb8, 0xc9, 0xda, 0xeb, 0xfc, 0x0d, 0x1e, 0x2f,
3821 0x40, 0x51, 0x62, 0x73, 0x84, 0x95, 0xa6, 0xb7,
3822 0xc8, 0xd9, 0xea, 0xfb, 0x0c, 0x1d, 0x2e, 0x3f,
3823 0x50, 0x61, 0x72, 0x83, 0x94, 0xa5, 0xb6, 0xc7,
3824 0xd8, 0xe9, 0xfa, 0x0b, 0x1c, 0x2d, 0x3e, 0x4f,
3825 0x60, 0x71, 0x82, 0x93, 0xa4, 0xb5, 0xc6, 0xd7,
3826 0xe8, 0xf9, 0x0a, 0x1b, 0x2c, 0x3d, 0x4e, 0x5f,
3827 0x70, 0x81, 0x92, 0xa3, 0xb4, 0xc5, 0xd6, 0xe7,
3828 0xf8, 0x09, 0x1a, 0x2b, 0x3c, 0x4d, 0x5e, 0x6f,
3829 0x80, 0x91, 0xa2, 0xb3, 0xc4, 0xd5, 0xe6, 0xf7,
3830 0x08, 0x19, 0x2a, 0x3b, 0x4c, 0x5d, 0x6e, 0x7f,
3831 0x90, 0xa1, 0xb2, 0xc3, 0xd4, 0xe5, 0xf6, 0x07,
3832 0x18, 0x29, 0x3a, 0x4b, 0x5c, 0x6d, 0x7e, 0x8f,
3833 0xa0, 0xb1, 0xc2, 0xd3, 0xe4, 0xf5, 0x06, 0x17,
3834 0x28, 0x39, 0x4a, 0x5b, 0x6c, 0x7d, 0x8e, 0x9f,
3835 0xb0, 0xc1, 0xd2, 0xe3, 0xf4, 0x05, 0x16, 0x27,
3836 0x38, 0x49, 0x5a, 0x6b, 0x7c, 0x8d, 0x9e, 0xaf,
3837 0xc0, 0xd1, 0xe2, 0xf3, 0x04, 0x15, 0x26, 0x37,
3838 0x48, 0x59, 0x6a, 0x7b, 0x8c, 0x9d, 0xae, 0xbf,
3839 0xd0, 0xe1, 0xf2, 0x03, 0x14, 0x25, 0x36, 0x47,
3840 0x58, 0x69, 0x7a, 0x8b, 0x9c, 0xad, 0xbe, 0xcf,
3841 0xe0, 0xf1, 0x02, 0x13, 0x24, 0x35, 0x46, 0x57,
3842 0x68, 0x79, 0x8a, 0x9b, 0xac, 0xbd, 0xce, 0xdf,
3843 0xf0, 0x01, 0x12, 0x23, 0x34, 0x45, 0x56, 0x67,
3844 0x78, 0x89, 0x9a, 0xab, 0xbc, 0xcd, 0xde, 0xef,
3845 0x00, 0x13, 0x26, 0x39, 0x4c, 0x5f, 0x72, 0x85,
3846 0x98, 0xab, 0xbe, 0xd1, 0xe4, 0xf7, 0x0a, 0x1d,
3847 0x30, 0x43, 0x56, 0x69, 0x7c, 0x8f, 0xa2, 0xb5,
3848 0xc8, 0xdb, 0xee, 0x01, 0x14, 0x27, 0x3a, 0x4d,
3849 0x60, 0x73, 0x86, 0x99, 0xac, 0xbf, 0xd2, 0xe5,
3850 0xf8, 0x0b, 0x1e, 0x31, 0x44, 0x57, 0x6a, 0x7d,
3851 0x90, 0xa3, 0xb6, 0xc9, 0xdc, 0xef, 0x02, 0x15,
3852 0x28, 0x3b, 0x4e, 0x61, 0x74, 0x87, 0x9a, 0xad,
3853 0xc0, 0xd3, 0xe6, 0xf9, 0x0c, 0x1f, 0x32, 0x45,
3854 0x58, 0x6b, 0x7e, 0x91, 0xa4, 0xb7, 0xca, 0xdd,
3855 0xf0, 0x03, 0x16, 0x29, 0x3c, 0x4f, 0x62, 0x75,
3856 0x88, 0x9b, 0xae, 0xc1, 0xd4, 0xe7, 0xfa, 0x0d,
3857 0x20, 0x33, 0x46, 0x59, 0x6c, 0x7f, 0x92, 0xa5,
3858 0xb8, 0xcb, 0xde, 0xf1, 0x04, 0x17, 0x2a, 0x3d,
3859 0x50, 0x63, 0x76, 0x89, 0x9c, 0xaf, 0xc2, 0xd5,
3860 0xe8, 0xfb, 0x0e, 0x21, 0x34, 0x47, 0x5a, 0x6d,
3861 0x80, 0x93, 0xa6, 0xb9, 0xcc, 0xdf, 0xf2, 0x05,
3862 0x18, 0x2b, 0x3e, 0x51, 0x64, 0x77, 0x8a, 0x9d,
3863 0xb0, 0xc3, 0xd6, 0xe9, 0xfc, 0x0f, 0x22, 0x35,
3864 0x48, 0x5b, 0x6e, 0x81, 0x94, 0xa7, 0xba, 0xcd,
3865 0xe0, 0xf3, 0x06, 0x19, 0x2c, 0x3f, 0x52, 0x65,
3866 0x78, 0x8b, 0x9e, 0xb1, 0xc4, 0xd7, 0xea, 0xfd,
3867 0x10, 0x23, 0x36, 0x49, 0x5c, 0x6f, 0x82, 0x95,
3868 0xa8, 0xbb, 0xce, 0xe1, 0xf4, 0x07, 0x1a, 0x2d,
3869 0x40, 0x53, 0x66, 0x79, 0x8c, 0x9f, 0xb2, 0xc5,
3870 0xd8, 0xeb, 0xfe, 0x11, 0x24, 0x37, 0x4a, 0x5d,
3871 0x70, 0x83, 0x96, 0xa9, 0xbc, 0xcf, 0xe2, 0xf5,
3872 0x08, 0x1b, 0x2e, 0x41, 0x54, 0x67, 0x7a, 0x8d,
3873 0xa0, 0xb3, 0xc6, 0xd9, 0xec, 0xff, 0x12, 0x25,
3874 0x38, 0x4b, 0x5e, 0x71, 0x84, 0x97, 0xaa, 0xbd,
3875 0xd0, 0xe3, 0xf6, 0x09, 0x1c, 0x2f, 0x42, 0x55,
3876 0x68, 0x7b, 0x8e, 0xa1, 0xb4, 0xc7, 0xda, 0xed,
3877 0x00, 0x15, 0x2a, 0x3f, 0x54, 0x69, 0x7e, 0x93,
3878 0xa8, 0xbd, 0xd2, 0xe7, 0xfc, 0x11, 0x26, 0x3b,
3879 0x50, 0x65, 0x7a, 0x8f, 0xa4, 0xb9, 0xce, 0xe3,
3880 0xf8, 0x0d, 0x22, 0x37, 0x4c, 0x61, 0x76, 0x8b,
3881 0xa0, 0xb5, 0xca, 0xdf, 0xf4, 0x09, 0x1e, 0x33,
3882 0x48, 0x5d, 0x72, 0x87, 0x9c, 0xb1, 0xc6, 0xdb,
3883 0xf0, 0x05, 0x1a, 0x2f, 0x44, 0x59, 0x6e, 0x83,
3884 0x98, 0xad, 0xc2, 0xd7, 0xec, 0x01, 0x16, 0x2b,
3885 0x40, 0x55, 0x6a, 0x7f, 0x94, 0xa9, 0xbe, 0xd3,
3886 0xe8, 0xfd, 0x12, 0x27, 0x3c, 0x51, 0x66, 0x7b,
3887 0x90, 0xa5, 0xba, 0xcf, 0xe4, 0xf9, 0x0e, 0x23,
3888 0x38, 0x4d, 0x62, 0x77, 0x8c, 0xa1, 0xb6, 0xcb,
3889 0xe0, 0xf5, 0x0a, 0x1f, 0x34, 0x49, 0x5e, 0x73,
3890 0x88, 0x9d, 0xb2, 0xc7, 0xdc, 0xf1, 0x06, 0x1b,
3891 0x30, 0x45, 0x5a, 0x6f, 0x84, 0x99, 0xae, 0xc3,
3892 0xd8, 0xed, 0x02, 0x17, 0x2c, 0x41, 0x56, 0x6b,
3893 0x80, 0x95, 0xaa, 0xbf, 0xd4, 0xe9, 0xfe, 0x13,
3894 0x28, 0x3d, 0x52, 0x67, 0x7c, 0x91, 0xa6, 0xbb,
3895 0xd0, 0xe5, 0xfa, 0x0f, 0x24, 0x39, 0x4e, 0x63,
3896 0x78, 0x8d, 0xa2, 0xb7, 0xcc, 0xe1, 0xf6, 0x0b,
3897 0x20, 0x35, 0x4a, 0x5f, 0x74, 0x89, 0x9e, 0xb3,
3898 0xc8, 0xdd, 0xf2, 0x07, 0x1c, 0x31, 0x46, 0x5b,
3899 0x70, 0x85, 0x9a, 0xaf, 0xc4, 0xd9, 0xee, 0x03,
3900 0x18, 0x2d, 0x42, 0x57, 0x6c, 0x81, 0x96, 0xab,
3901 0xc0, 0xd5, 0xea, 0xff, 0x14, 0x29, 0x3e, 0x53,
3902 0x68, 0x7d, 0x92, 0xa7, 0xbc, 0xd1, 0xe6, 0xfb,
3903 0x10, 0x25, 0x3a, 0x4f, 0x64, 0x79, 0x8e, 0xa3,
3904 0xb8, 0xcd, 0xe2, 0xf7, 0x0c, 0x21, 0x36, 0x4b,
3905 0x60, 0x75, 0x8a, 0x9f, 0xb4, 0xc9, 0xde, 0xf3,
3906 0x08, 0x1d, 0x32, 0x47, 0x5c, 0x71, 0x86, 0x9b,
3907 0xb0, 0xc5, 0xda, 0xef, 0x04, 0x19, 0x2e, 0x43,
3908 0x58, 0x6d, 0x82, 0x97, 0xac, 0xc1, 0xd6, 0xeb,
3909 0x00, 0x17, 0x2e, 0x45, 0x5c, 0x73, 0x8a, 0xa1,
3910 0xb8, 0xcf, 0xe6, 0xfd, 0x14, 0x2b, 0x42, 0x59,
3911 0x70, 0x87, 0x9e, 0xb5, 0xcc, 0xe3, 0xfa, 0x11,
3912 0x28, 0x3f, 0x56, 0x6d, 0x84, 0x9b, 0xb2, 0xc9,
3913 0xe0, 0xf7, 0x0e, 0x25, 0x3c, 0x53, 0x6a, 0x81,
3914 0x98, 0xaf, 0xc6, 0xdd, 0xf4, 0x0b, 0x22, 0x39,
3915 0x50, 0x67, 0x7e, 0x95, 0xac, 0xc3, 0xda, 0xf1,
3916 0x08, 0x1f, 0x36, 0x4d, 0x64, 0x7b, 0x92, 0xa9,
3917 0xc0, 0xd7, 0xee, 0x05, 0x1c, 0x33, 0x4a, 0x61,
3918 0x78, 0x8f, 0xa6, 0xbd, 0xd4, 0xeb, 0x02, 0x19,
3919 0x30, 0x47, 0x5e, 0x75, 0x8c, 0xa3, 0xba, 0xd1,
3920 0xe8, 0xff, 0x16, 0x2d, 0x44, 0x5b, 0x72, 0x89,
3921 0xa0, 0xb7, 0xce, 0xe5, 0xfc, 0x13, 0x2a, 0x41,
3922 0x58, 0x6f, 0x86, 0x9d, 0xb4, 0xcb, 0xe2, 0xf9,
3923 0x10, 0x27, 0x3e, 0x55, 0x6c, 0x83, 0x9a, 0xb1,
3924 0xc8, 0xdf, 0xf6, 0x0d, 0x24, 0x3b, 0x52, 0x69,
3925 0x80, 0x97, 0xae, 0xc5, 0xdc, 0xf3, 0x0a, 0x21,
3926 0x38, 0x4f, 0x66, 0x7d, 0x94, 0xab, 0xc2, 0xd9,
3927 0xf0, 0x07, 0x1e, 0x35, 0x4c, 0x63, 0x7a, 0x91,
3928 0xa8, 0xbf, 0xd6, 0xed, 0x04, 0x1b, 0x32, 0x49,
3929 0x60, 0x77, 0x8e, 0xa5, 0xbc, 0xd3, 0xea, 0x01,
3930 0x18, 0x2f, 0x46, 0x5d, 0x74, 0x8b, 0xa2, 0xb9,
3931 0xd0, 0xe7, 0xfe, 0x15, 0x2c, 0x43, 0x5a, 0x71,
3932 0x88, 0x9f, 0xb6, 0xcd, 0xe4, 0xfb, 0x12, 0x29,
3933 0x40, 0x57, 0x6e, 0x85, 0x9c, 0xb3, 0xca, 0xe1,
3934 0xf8, 0x0f, 0x26, 0x3d, 0x54, 0x6b, 0x82, 0x99,
3935 0xb0, 0xc7, 0xde, 0xf5, 0x0c, 0x23, 0x3a, 0x51,
3936 0x68, 0x7f, 0x96, 0xad, 0xc4, 0xdb, 0xf2, 0x09,
3937 0x20, 0x37, 0x4e, 0x65, 0x7c, 0x93, 0xaa, 0xc1,
3938 0xd8, 0xef, 0x06, 0x1d, 0x34, 0x4b, 0x62, 0x79,
3939 0x90, 0xa7, 0xbe, 0xd5, 0xec, 0x03, 0x1a, 0x31,
3940 0x48, 0x5f, 0x76, 0x8d, 0xa4, 0xbb, 0xd2, 0xe9,
3941 0x00, 0x19, 0x32, 0x4b, 0x64, 0x7d, 0x96, 0xaf,
3942 0xc8, 0xe1, 0xfa, 0x13, 0x2c, 0x45, 0x5e, 0x77,
3943 0x90, 0xa9, 0xc2, 0xdb, 0xf4, 0x0d, 0x26, 0x3f,
3944 0x58, 0x71, 0x8a, 0xa3, 0xbc, 0xd5, 0xee, 0x07,
3945 0x20, 0x39, 0x52, 0x6b, 0x84, 0x9d, 0xb6, 0xcf,
3946 0xe8, 0x01, 0x1a, 0x33, 0x4c, 0x65, 0x7e, 0x97,
3947 0xb0, 0xc9, 0xe2, 0xfb, 0x14, 0x2d, 0x46, 0x5f,
3948 0x78, 0x91, 0xaa, 0xc3, 0xdc, 0xf5, 0x0e, 0x27,
3949 0x40, 0x59, 0x72, 0x8b, 0xa4, 0xbd, 0xd6, 0xef,
3950 0x08, 0x21, 0x3a, 0x53, 0x6c, 0x85, 0x9e, 0xb7,
3951 0xd0, 0xe9, 0x02, 0x1b, 0x34, 0x4d, 0x66, 0x7f,
3952 0x98, 0xb1, 0xca, 0xe3, 0xfc, 0x15, 0x2e, 0x47,
3953 0x60, 0x79, 0x92, 0xab, 0xc4, 0xdd, 0xf6, 0x0f,
3954 0x28, 0x41, 0x5a, 0x73, 0x8c, 0xa5, 0xbe, 0xd7,
3955 0xf0, 0x09, 0x22, 0x3b, 0x54, 0x6d, 0x86, 0x9f,
3956 0xb8, 0xd1, 0xea, 0x03, 0x1c, 0x35, 0x4e, 0x67,
3957 0x80, 0x99, 0xb2, 0xcb, 0xe4, 0xfd, 0x16, 0x2f,
3958 0x48, 0x61, 0x7a, 0x93, 0xac, 0xc5, 0xde, 0xf7,
3959 0x10, 0x29, 0x42, 0x5b, 0x74, 0x8d, 0xa6, 0xbf,
3960 0xd8, 0xf1, 0x0a, 0x23, 0x3c, 0x55, 0x6e, 0x87,
3961 0xa0, 0xb9, 0xd2, 0xeb, 0x04, 0x1d, 0x36, 0x4f,
3962 0x68, 0x81, 0x9a, 0xb3, 0xcc, 0xe5, 0xfe, 0x17,
3963 0x30, 0x49, 0x62, 0x7b, 0x94, 0xad, 0xc6, 0xdf,
3964 0xf8, 0x11, 0x2a, 0x43, 0x5c, 0x75, 0x8e, 0xa7,
3965 0xc0, 0xd9, 0xf2, 0x0b, 0x24, 0x3d, 0x56, 0x6f,
3966 0x88, 0xa1, 0xba, 0xd3, 0xec, 0x05, 0x1e, 0x37,
3967 0x50, 0x69, 0x82, 0x9b, 0xb4, 0xcd, 0xe6, 0xff,
3968 0x18, 0x31, 0x4a, 0x63, 0x7c, 0x95, 0xae, 0xc7,
3969 0xe0, 0xf9, 0x12, 0x2b, 0x44, 0x5d, 0x76, 0x8f,
3970 0xa8, 0xc1, 0xda, 0xf3, 0x0c, 0x25, 0x3e, 0x57,
3971 0x70, 0x89, 0xa2, 0xbb, 0xd4, 0xed, 0x06, 0x1f,
3972 0x38, 0x51, 0x6a, 0x83, 0x9c, 0xb5, 0xce, 0xe7,
3973 0x00, 0x1b, 0x36, 0x51, 0x6c, 0x87, 0xa2, 0xbd,
3974 0xd8, 0xf3, 0x0e, 0x29, 0x44, 0x5f, 0x7a, 0x95,
3975 0xb0, 0xcb, 0xe6, 0x01, 0x1c, 0x37, 0x52, 0x6d,
3976 0x88, 0xa3, 0xbe, 0xd9, 0xf4, 0x0f, 0x2a, 0x45,
3977 0x60, 0x7b, 0x96, 0xb1, 0xcc, 0xe7, 0x02, 0x1d,
3978 0x38, 0x53, 0x6e, 0x89, 0xa4, 0xbf, 0xda, 0xf5,
3979 0x10, 0x2b, 0x46, 0x61, 0x7c, 0x97, 0xb2, 0xcd,
3980 0xe8, 0x03, 0x1e, 0x39, 0x54, 0x6f, 0x8a, 0xa5,
3981 0xc0, 0xdb, 0xf6, 0x11, 0x2c, 0x47, 0x62, 0x7d,
3982 0x98, 0xb3, 0xce, 0xe9, 0x04, 0x1f, 0x3a, 0x55,
3983 0x70, 0x8b, 0xa6, 0xc1, 0xdc, 0xf7, 0x12, 0x2d,
3984 0x48, 0x63, 0x7e, 0x99, 0xb4, 0xcf, 0xea, 0x05,
3985 0x20, 0x3b, 0x56, 0x71, 0x8c, 0xa7, 0xc2, 0xdd,
3986 0xf8, 0x13, 0x2e, 0x49, 0x64, 0x7f, 0x9a, 0xb5,
3987 0xd0, 0xeb, 0x06, 0x21, 0x3c, 0x57, 0x72, 0x8d,
3988 0xa8, 0xc3, 0xde, 0xf9, 0x14, 0x2f, 0x4a, 0x65,
3989 0x80, 0x9b, 0xb6, 0xd1, 0xec, 0x07, 0x22, 0x3d,
3990 0x58, 0x73, 0x8e, 0xa9, 0xc4, 0xdf, 0xfa, 0x15,
3991 0x30, 0x4b, 0x66, 0x81, 0x9c, 0xb7, 0xd2, 0xed,
3992 0x08, 0x23, 0x3e, 0x59, 0x74, 0x8f, 0xaa, 0xc5,
3993 0xe0, 0xfb, 0x16, 0x31, 0x4c, 0x67, 0x82, 0x9d,
3994 0xb8, 0xd3, 0xee, 0x09, 0x24, 0x3f, 0x5a, 0x75,
3995 0x90, 0xab, 0xc6, 0xe1, 0xfc, 0x17, 0x32, 0x4d,
3996 0x68, 0x83, 0x9e, 0xb9, 0xd4, 0xef, 0x0a, 0x25,
3997 0x40, 0x5b, 0x76, 0x91, 0xac, 0xc7, 0xe2, 0xfd,
3998 0x18, 0x33, 0x4e, 0x69, 0x84, 0x9f, 0xba, 0xd5,
3999 0xf0, 0x0b, 0x26, 0x41, 0x5c, 0x77, 0x92, 0xad,
4000 0xc8, 0xe3, 0xfe, 0x19, 0x34, 0x4f, 0x6a, 0x85,
4001 0xa0, 0xbb, 0xd6, 0xf1, 0x0c, 0x27, 0x42, 0x5d,
4002 0x78, 0x93, 0xae, 0xc9, 0xe4, 0xff, 0x1a, 0x35,
4003 0x50, 0x6b, 0x86, 0xa1, 0xbc, 0xd7, 0xf2, 0x0d,
4004 0x28, 0x43, 0x5e, 0x79, 0x94, 0xaf, 0xca, 0xe5,
4005 0x00, 0x1d, 0x3a, 0x57, 0x74, 0x91, 0xae, 0xcb,
4006 0xe8, 0x05, 0x22, 0x3f, 0x5c, 0x79, 0x96, 0xb3,
4007 0xd0, 0xed, 0x0a, 0x27, 0x44, 0x61, 0x7e, 0x9b,
4008 0xb8, 0xd5, 0xf2, 0x0f, 0x2c, 0x49, 0x66, 0x83,
4009 0xa0, 0xbd, 0xda, 0xf7, 0x14, 0x31, 0x4e, 0x6b,
4010 0x88, 0xa5, 0xc2, 0xdf, 0xfc, 0x19, 0x36, 0x53,
4011 0x70, 0x8d, 0xaa, 0xc7, 0xe4, 0x01, 0x1e, 0x3b,
4012 0x58, 0x75, 0x92, 0xaf, 0xcc, 0xe9, 0x06, 0x23,
4013 0x40, 0x5d, 0x7a, 0x97, 0xb4, 0xd1, 0xee, 0x0b,
4014 0x28, 0x45, 0x62, 0x7f, 0x9c, 0xb9, 0xd6, 0xf3,
4015 0x10, 0x2d, 0x4a, 0x67, 0x84, 0xa1, 0xbe, 0xdb,
4016 0xf8, 0x15, 0x32, 0x4f, 0x6c, 0x89, 0xa6, 0xc3,
4017 0xe0, 0xfd, 0x1a, 0x37, 0x54, 0x71, 0x8e, 0xab,
4018 0xc8, 0xe5, 0x02, 0x1f, 0x3c, 0x59, 0x76, 0x93,
4019 0xb0, 0xcd, 0xea, 0x07, 0x24, 0x41, 0x5e, 0x7b,
4020 0x98, 0xb5, 0xd2, 0xef, 0x0c, 0x29, 0x46, 0x63,
4021 0x80, 0x9d, 0xba, 0xd7, 0xf4, 0x11, 0x2e, 0x4b,
4022 0x68, 0x85, 0xa2, 0xbf, 0xdc, 0xf9, 0x16, 0x33,
4023 0x50, 0x6d, 0x8a, 0xa7, 0xc4, 0xe1, 0xfe, 0x1b,
4024 0x38, 0x55, 0x72, 0x8f, 0xac, 0xc9, 0xe6, 0x03,
4025 0x20, 0x3d, 0x5a, 0x77, 0x94, 0xb1, 0xce, 0xeb,
4026 0x08, 0x25, 0x42, 0x5f, 0x7c, 0x99, 0xb6, 0xd3,
4027 0xf0, 0x0d, 0x2a, 0x47, 0x64, 0x81, 0x9e, 0xbb,
4028 0xd8, 0xf5, 0x12, 0x2f, 0x4c, 0x69, 0x86, 0xa3,
4029 0xc0, 0xdd, 0xfa, 0x17, 0x34, 0x51, 0x6e, 0x8b,
4030 0xa8, 0xc5, 0xe2, 0xff, 0x1c, 0x39, 0x56, 0x73,
4031 0x90, 0xad, 0xca, 0xe7, 0x04, 0x21, 0x3e, 0x5b,
4032 0x78, 0x95, 0xb2, 0xcf, 0xec, 0x09, 0x26, 0x43,
4033 0x60, 0x7d, 0x9a, 0xb7, 0xd4, 0xf1, 0x0e, 0x2b,
4034 0x48, 0x65, 0x82, 0x9f, 0xbc, 0xd9, 0xf6, 0x13,
4035 0x30, 0x4d, 0x6a, 0x87, 0xa4, 0xc1, 0xde, 0xfb,
4036 0x18, 0x35, 0x52, 0x6f, 0x8c, 0xa9, 0xc6, 0xe3,
4037 0x00, 0x1f, 0x3e, 0x5d, 0x7c, 0x9b, 0xba, 0xd9,
4038 0xf8, 0x17, 0x36, 0x55, 0x74, 0x93, 0xb2, 0xd1,
4039 0xf0, 0x0f, 0x2e, 0x4d, 0x6c, 0x8b, 0xaa, 0xc9,
4040 0xe8, 0x07, 0x26, 0x45, 0x64, 0x83, 0xa2, 0xc1,
4041 0xe0, 0xff, 0x1e, 0x3d, 0x5c, 0x7b, 0x9a, 0xb9,
4042 0xd8, 0xf7, 0x16, 0x35, 0x54, 0x73, 0x92, 0xb1,
4043 0xd0, 0xef, 0x0e, 0x2d, 0x4c, 0x6b, 0x8a, 0xa9,
4044 0xc8, 0xe7, 0x06, 0x25, 0x44, 0x63, 0x82, 0xa1,
4045 0xc0, 0xdf, 0xfe, 0x1d, 0x3c, 0x5b, 0x7a, 0x99,
4046 0xb8, 0xd7, 0xf6, 0x15, 0x34, 0x53, 0x72, 0x91,
4047 0xb0, 0xcf, 0xee, 0x0d, 0x2c, 0x4b, 0x6a, 0x89,
4048 0xa8, 0xc7, 0xe6, 0x05, 0x24, 0x43, 0x62, 0x81,
4049 0xa0, 0xbf, 0xde, 0xfd, 0x1c, 0x3b, 0x5a, 0x79,
4050 0x98, 0xb7, 0xd6, 0xf5, 0x14, 0x33, 0x52, 0x71,
4051 0x90, 0xaf, 0xce, 0xed, 0x0c, 0x2b, 0x4a, 0x69,
4052 0x88, 0xa7, 0xc6, 0xe5, 0x04, 0x23, 0x42, 0x61,
4053 0x80, 0x9f, 0xbe, 0xdd, 0xfc, 0x1b, 0x3a, 0x59,
4054 0x78, 0x97, 0xb6, 0xd5, 0xf4, 0x13, 0x32, 0x51,
4055 0x70, 0x8f, 0xae, 0xcd, 0xec, 0x0b, 0x2a, 0x49,
4056 0x68, 0x87, 0xa6, 0xc5, 0xe4, 0x03, 0x22, 0x41,
4057 0x60, 0x7f, 0x9e, 0xbd, 0xdc, 0xfb, 0x1a, 0x39,
4058 0x58, 0x77, 0x96, 0xb5, 0xd4, 0xf3, 0x12, 0x31,
4059 0x50, 0x6f, 0x8e, 0xad, 0xcc, 0xeb, 0x0a, 0x29,
4060 0x48, 0x67, 0x86, 0xa5, 0xc4, 0xe3, 0x02, 0x21,
4061 0x40, 0x5f, 0x7e, 0x9d, 0xbc, 0xdb, 0xfa, 0x19,
4062 0x38, 0x57, 0x76, 0x95, 0xb4, 0xd3, 0xf2, 0x11,
4063 0x30, 0x4f, 0x6e, 0x8d, 0xac, 0xcb, 0xea, 0x09,
4064 0x28, 0x47, 0x66, 0x85, 0xa4, 0xc3, 0xe2, 0x01,
4065 0x20, 0x3f, 0x5e, 0x7d, 0x9c, 0xbb, 0xda, 0xf9,
4066 0x18, 0x37, 0x56, 0x75, 0x94, 0xb3, 0xd2, 0xf1,
4067 0x10, 0x2f, 0x4e, 0x6d, 0x8c, 0xab, 0xca, 0xe9,
4068 0x08, 0x27, 0x46, 0x65, 0x84, 0xa3, 0xc2, 0xe1,
4069 0x00, 0x21, 0x42, 0x63,
4070 },
4071 .ilen = 4100,
4072 .result = {
4073 0xf0, 0x5c, 0x74, 0xad, 0x4e, 0xbc, 0x99, 0xe2,
4074 0xae, 0xff, 0x91, 0x3a, 0x44, 0xcf, 0x38, 0x32,
4075 0x1e, 0xad, 0xa7, 0xcd, 0xa1, 0x39, 0x95, 0xaa,
4076 0x10, 0xb1, 0xb3, 0x2e, 0x04, 0x31, 0x8f, 0x86,
4077 0xf2, 0x62, 0x74, 0x70, 0x0c, 0xa4, 0x46, 0x08,
4078 0xa8, 0xb7, 0x99, 0xa8, 0xe9, 0xd2, 0x73, 0x79,
4079 0x7e, 0x6e, 0xd4, 0x8f, 0x1e, 0xc7, 0x8e, 0x31,
4080 0x0b, 0xfa, 0x4b, 0xce, 0xfd, 0xf3, 0x57, 0x71,
4081 0xe9, 0x46, 0x03, 0xa5, 0x3d, 0x34, 0x00, 0xe2,
4082 0x18, 0xff, 0x75, 0x6d, 0x06, 0x2d, 0x00, 0xab,
4083 0xb9, 0x3e, 0x6c, 0x59, 0xc5, 0x84, 0x06, 0xb5,
4084 0x8b, 0xd0, 0x89, 0x9c, 0x4a, 0x79, 0x16, 0xc6,
4085 0x3d, 0x74, 0x54, 0xfa, 0x44, 0xcd, 0x23, 0x26,
4086 0x5c, 0xcf, 0x7e, 0x28, 0x92, 0x32, 0xbf, 0xdf,
4087 0xa7, 0x20, 0x3c, 0x74, 0x58, 0x2a, 0x9a, 0xde,
4088 0x61, 0x00, 0x1c, 0x4f, 0xff, 0x59, 0xc4, 0x22,
4089 0xac, 0x3c, 0xd0, 0xe8, 0x6c, 0xf9, 0x97, 0x1b,
4090 0x58, 0x9b, 0xad, 0x71, 0xe8, 0xa9, 0xb5, 0x0d,
4091 0xee, 0x2f, 0x04, 0x1f, 0x7f, 0xbc, 0x99, 0xee,
4092 0x84, 0xff, 0x42, 0x60, 0xdc, 0x3a, 0x18, 0xa5,
4093 0x81, 0xf9, 0xef, 0xdc, 0x7a, 0x0f, 0x65, 0x41,
4094 0x2f, 0xa3, 0xd3, 0xf9, 0xc2, 0xcb, 0xc0, 0x4d,
4095 0x8f, 0xd3, 0x76, 0x96, 0xad, 0x49, 0x6d, 0x38,
4096 0x3d, 0x39, 0x0b, 0x6c, 0x80, 0xb7, 0x54, 0x69,
4097 0xf0, 0x2c, 0x90, 0x02, 0x29, 0x0d, 0x1c, 0x12,
4098 0xad, 0x55, 0xc3, 0x8b, 0x68, 0xd9, 0xcc, 0xb3,
4099 0xb2, 0x64, 0x33, 0x90, 0x5e, 0xca, 0x4b, 0xe2,
4100 0xfb, 0x75, 0xdc, 0x63, 0xf7, 0x9f, 0x82, 0x74,
4101 0xf0, 0xc9, 0xaa, 0x7f, 0xe9, 0x2a, 0x9b, 0x33,
4102 0xbc, 0x88, 0x00, 0x7f, 0xca, 0xb2, 0x1f, 0x14,
4103 0xdb, 0xc5, 0x8e, 0x7b, 0x11, 0x3c, 0x3e, 0x08,
4104 0xf3, 0x83, 0xe8, 0xe0, 0x94, 0x86, 0x2e, 0x92,
4105 0x78, 0x6b, 0x01, 0xc9, 0xc7, 0x83, 0xba, 0x21,
4106 0x6a, 0x25, 0x15, 0x33, 0x4e, 0x45, 0x08, 0xec,
4107 0x35, 0xdb, 0xe0, 0x6e, 0x31, 0x51, 0x79, 0xa9,
4108 0x42, 0x44, 0x65, 0xc1, 0xa0, 0xf1, 0xf9, 0x2a,
4109 0x70, 0xd5, 0xb6, 0xc6, 0xc1, 0x8c, 0x39, 0xfc,
4110 0x25, 0xa6, 0x55, 0xd9, 0xdd, 0x2d, 0x4c, 0xec,
4111 0x49, 0xc6, 0xeb, 0x0e, 0xa8, 0x25, 0x2a, 0x16,
4112 0x1b, 0x66, 0x84, 0xda, 0xe2, 0x92, 0xe5, 0xc0,
4113 0xc8, 0x53, 0x07, 0xaf, 0x80, 0x84, 0xec, 0xfd,
4114 0xcd, 0xd1, 0x6e, 0xcd, 0x6f, 0x6a, 0xf5, 0x36,
4115 0xc5, 0x15, 0xe5, 0x25, 0x7d, 0x77, 0xd1, 0x1a,
4116 0x93, 0x36, 0xa9, 0xcf, 0x7c, 0xa4, 0x54, 0x4a,
4117 0x06, 0x51, 0x48, 0x4e, 0xf6, 0x59, 0x87, 0xd2,
4118 0x04, 0x02, 0xef, 0xd3, 0x44, 0xde, 0x76, 0x31,
4119 0xb3, 0x34, 0x17, 0x1b, 0x9d, 0x66, 0x11, 0x9f,
4120 0x1e, 0xcc, 0x17, 0xe9, 0xc7, 0x3c, 0x1b, 0xe7,
4121 0xcb, 0x50, 0x08, 0xfc, 0xdc, 0x2b, 0x24, 0xdb,
4122 0x65, 0x83, 0xd0, 0x3b, 0xe3, 0x30, 0xea, 0x94,
4123 0x6c, 0xe7, 0xe8, 0x35, 0x32, 0xc7, 0xdb, 0x64,
4124 0xb4, 0x01, 0xab, 0x36, 0x2c, 0x77, 0x13, 0xaf,
4125 0xf8, 0x2b, 0x88, 0x3f, 0x54, 0x39, 0xc4, 0x44,
4126 0xfe, 0xef, 0x6f, 0x68, 0x34, 0xbe, 0x0f, 0x05,
4127 0x16, 0x6d, 0xf6, 0x0a, 0x30, 0xe7, 0xe3, 0xed,
4128 0xc4, 0xde, 0x3c, 0x1b, 0x13, 0xd8, 0xdb, 0xfe,
4129 0x41, 0x62, 0xe5, 0x28, 0xd4, 0x8d, 0xa3, 0xc7,
4130 0x93, 0x97, 0xc6, 0x48, 0x45, 0x1d, 0x9f, 0x83,
4131 0xdf, 0x4b, 0x40, 0x3e, 0x42, 0x25, 0x87, 0x80,
4132 0x4c, 0x7d, 0xa8, 0xd4, 0x98, 0x23, 0x95, 0x75,
4133 0x41, 0x8c, 0xda, 0x41, 0x9b, 0xd4, 0xa7, 0x06,
4134 0xb5, 0xf1, 0x71, 0x09, 0x53, 0xbe, 0xca, 0xbf,
4135 0x32, 0x03, 0xed, 0xf0, 0x50, 0x1c, 0x56, 0x39,
4136 0x5b, 0xa4, 0x75, 0x18, 0xf7, 0x9b, 0x58, 0xef,
4137 0x53, 0xfc, 0x2a, 0x38, 0x23, 0x15, 0x75, 0xcd,
4138 0x45, 0xe5, 0x5a, 0x82, 0x55, 0xba, 0x21, 0xfa,
4139 0xd4, 0xbd, 0xc6, 0x94, 0x7c, 0xc5, 0x80, 0x12,
4140 0xf7, 0x4b, 0x32, 0xc4, 0x9a, 0x82, 0xd8, 0x28,
4141 0x8f, 0xd9, 0xc2, 0x0f, 0x60, 0x03, 0xbe, 0x5e,
4142 0x21, 0xd6, 0x5f, 0x58, 0xbf, 0x5c, 0xb1, 0x32,
4143 0x82, 0x8d, 0xa9, 0xe5, 0xf2, 0x66, 0x1a, 0xc0,
4144 0xa0, 0xbc, 0x58, 0x2f, 0x71, 0xf5, 0x2f, 0xed,
4145 0xd1, 0x26, 0xb9, 0xd8, 0x49, 0x5a, 0x07, 0x19,
4146 0x01, 0x7c, 0x59, 0xb0, 0xf8, 0xa4, 0xb7, 0xd3,
4147 0x7b, 0x1a, 0x8c, 0x38, 0xf4, 0x50, 0xa4, 0x59,
4148 0xb0, 0xcc, 0x41, 0x0b, 0x88, 0x7f, 0xe5, 0x31,
4149 0xb3, 0x42, 0xba, 0xa2, 0x7e, 0xd4, 0x32, 0x71,
4150 0x45, 0x87, 0x48, 0xa9, 0xc2, 0xf2, 0x89, 0xb3,
4151 0xe4, 0xa7, 0x7e, 0x52, 0x15, 0x61, 0xfa, 0xfe,
4152 0xc9, 0xdd, 0x81, 0xeb, 0x13, 0xab, 0xab, 0xc3,
4153 0x98, 0x59, 0xd8, 0x16, 0x3d, 0x14, 0x7a, 0x1c,
4154 0x3c, 0x41, 0x9a, 0x16, 0x16, 0x9b, 0xd2, 0xd2,
4155 0x69, 0x3a, 0x29, 0x23, 0xac, 0x86, 0x32, 0xa5,
4156 0x48, 0x9c, 0x9e, 0xf3, 0x47, 0x77, 0x81, 0x70,
4157 0x24, 0xe8, 0x85, 0xd2, 0xf5, 0xb5, 0xfa, 0xff,
4158 0x59, 0x6a, 0xd3, 0x50, 0x59, 0x43, 0x59, 0xde,
4159 0xd9, 0xf1, 0x55, 0xa5, 0x0c, 0xc3, 0x1a, 0x1a,
4160 0x18, 0x34, 0x0d, 0x1a, 0x63, 0x33, 0xed, 0x10,
4161 0xe0, 0x1d, 0x2a, 0x18, 0xd2, 0xc0, 0x54, 0xa8,
4162 0xca, 0xb5, 0x9a, 0xd3, 0xdd, 0xca, 0x45, 0x84,
4163 0x50, 0xe7, 0x0f, 0xfe, 0xa4, 0x99, 0x5a, 0xbe,
4164 0x43, 0x2d, 0x9a, 0xcb, 0x92, 0x3f, 0x5a, 0x1d,
4165 0x85, 0xd8, 0xc9, 0xdf, 0x68, 0xc9, 0x12, 0x80,
4166 0x56, 0x0c, 0xdc, 0x00, 0xdc, 0x3a, 0x7d, 0x9d,
4167 0xa3, 0xa2, 0xe8, 0x4d, 0xbf, 0xf9, 0x70, 0xa0,
4168 0xa4, 0x13, 0x4f, 0x6b, 0xaf, 0x0a, 0x89, 0x7f,
4169 0xda, 0xf0, 0xbf, 0x9b, 0xc8, 0x1d, 0xe5, 0xf8,
4170 0x2e, 0x8b, 0x07, 0xb5, 0x73, 0x1b, 0xcc, 0xa2,
4171 0xa6, 0xad, 0x30, 0xbc, 0x78, 0x3c, 0x5b, 0x10,
4172 0xfa, 0x5e, 0x62, 0x2d, 0x9e, 0x64, 0xb3, 0x33,
4173 0xce, 0xf9, 0x1f, 0x86, 0xe7, 0x8b, 0xa2, 0xb8,
4174 0xe8, 0x99, 0x57, 0x8c, 0x11, 0xed, 0x66, 0xd9,
4175 0x3c, 0x72, 0xb9, 0xc3, 0xe6, 0x4e, 0x17, 0x3a,
4176 0x6a, 0xcb, 0x42, 0x24, 0x06, 0xed, 0x3e, 0x4e,
4177 0xa3, 0xe8, 0x6a, 0x94, 0xda, 0x0d, 0x4e, 0xd5,
4178 0x14, 0x19, 0xcf, 0xb6, 0x26, 0xd8, 0x2e, 0xcc,
4179 0x64, 0x76, 0x38, 0x49, 0x4d, 0xfe, 0x30, 0x6d,
4180 0xe4, 0xc8, 0x8c, 0x7b, 0xc4, 0xe0, 0x35, 0xba,
4181 0x22, 0x6e, 0x76, 0xe1, 0x1a, 0xf2, 0x53, 0xc3,
4182 0x28, 0xa2, 0x82, 0x1f, 0x61, 0x69, 0xad, 0xc1,
4183 0x7b, 0x28, 0x4b, 0x1e, 0x6c, 0x85, 0x95, 0x9b,
4184 0x51, 0xb5, 0x17, 0x7f, 0x12, 0x69, 0x8c, 0x24,
4185 0xd5, 0xc7, 0x5a, 0x5a, 0x11, 0x54, 0xff, 0x5a,
4186 0xf7, 0x16, 0xc3, 0x91, 0xa6, 0xf0, 0xdc, 0x0a,
4187 0xb6, 0xa7, 0x4a, 0x0d, 0x7a, 0x58, 0xfe, 0xa5,
4188 0xf5, 0xcb, 0x8f, 0x7b, 0x0e, 0xea, 0x57, 0xe7,
4189 0xbd, 0x79, 0xd6, 0x1c, 0x88, 0x23, 0x6c, 0xf2,
4190 0x4d, 0x29, 0x77, 0x53, 0x35, 0x6a, 0x00, 0x8d,
4191 0xcd, 0xa3, 0x58, 0xbe, 0x77, 0x99, 0x18, 0xf8,
4192 0xe6, 0xe1, 0x8f, 0xe9, 0x37, 0x8f, 0xe3, 0xe2,
4193 0x5a, 0x8a, 0x93, 0x25, 0xaf, 0xf3, 0x78, 0x80,
4194 0xbe, 0xa6, 0x1b, 0xc6, 0xac, 0x8b, 0x1c, 0x91,
4195 0x58, 0xe1, 0x9f, 0x89, 0x35, 0x9d, 0x1d, 0x21,
4196 0x29, 0x9f, 0xf4, 0x99, 0x02, 0x27, 0x0f, 0xa8,
4197 0x4f, 0x79, 0x94, 0x2b, 0x33, 0x2c, 0xda, 0xa2,
4198 0x26, 0x39, 0x83, 0x94, 0xef, 0x27, 0xd8, 0x53,
4199 0x8f, 0x66, 0x0d, 0xe4, 0x41, 0x7d, 0x34, 0xcd,
4200 0x43, 0x7c, 0x95, 0x0a, 0x53, 0xef, 0x66, 0xda,
4201 0x7e, 0x9b, 0xf3, 0x93, 0xaf, 0xd0, 0x73, 0x71,
4202 0xba, 0x40, 0x9b, 0x74, 0xf8, 0xd7, 0xd7, 0x41,
4203 0x6d, 0xaf, 0x72, 0x9c, 0x8d, 0x21, 0x87, 0x3c,
4204 0xfd, 0x0a, 0x90, 0xa9, 0x47, 0x96, 0x9e, 0xd3,
4205 0x88, 0xee, 0x73, 0xcf, 0x66, 0x2f, 0x52, 0x56,
4206 0x6d, 0xa9, 0x80, 0x4c, 0xe2, 0x6f, 0x62, 0x88,
4207 0x3f, 0x0e, 0x54, 0x17, 0x48, 0x80, 0x5d, 0xd3,
4208 0xc3, 0xda, 0x25, 0x3d, 0xa1, 0xc8, 0xcb, 0x9f,
4209 0x9b, 0x70, 0xb3, 0xa1, 0xeb, 0x04, 0x52, 0xa1,
4210 0xf2, 0x22, 0x0f, 0xfc, 0xc8, 0x18, 0xfa, 0xf9,
4211 0x85, 0x9c, 0xf1, 0xac, 0xeb, 0x0c, 0x02, 0x46,
4212 0x75, 0xd2, 0xf5, 0x2c, 0xe3, 0xd2, 0x59, 0x94,
4213 0x12, 0xf3, 0x3c, 0xfc, 0xd7, 0x92, 0xfa, 0x36,
4214 0xba, 0x61, 0x34, 0x38, 0x7c, 0xda, 0x48, 0x3e,
4215 0x08, 0xc9, 0x39, 0x23, 0x5e, 0x02, 0x2c, 0x1a,
4216 0x18, 0x7e, 0xb4, 0xd9, 0xfd, 0x9e, 0x40, 0x02,
4217 0xb1, 0x33, 0x37, 0x32, 0xe7, 0xde, 0xd6, 0xd0,
4218 0x7c, 0x58, 0x65, 0x4b, 0xf8, 0x34, 0x27, 0x9c,
4219 0x44, 0xb4, 0xbd, 0xe9, 0xe9, 0x4c, 0x78, 0x7d,
4220 0x4b, 0x9f, 0xce, 0xb1, 0xcd, 0x47, 0xa5, 0x37,
4221 0xe5, 0x6d, 0xbd, 0xb9, 0x43, 0x94, 0x0a, 0xd4,
4222 0xd6, 0xf9, 0x04, 0x5f, 0xb5, 0x66, 0x6c, 0x1a,
4223 0x35, 0x12, 0xe3, 0x36, 0x28, 0x27, 0x36, 0x58,
4224 0x01, 0x2b, 0x79, 0xe4, 0xba, 0x6d, 0x10, 0x7d,
4225 0x65, 0xdf, 0x84, 0x95, 0xf4, 0xd5, 0xb6, 0x8f,
4226 0x2b, 0x9f, 0x96, 0x00, 0x86, 0x60, 0xf0, 0x21,
4227 0x76, 0xa8, 0x6a, 0x8c, 0x28, 0x1c, 0xb3, 0x6b,
4228 0x97, 0xd7, 0xb6, 0x53, 0x2a, 0xcc, 0xab, 0x40,
4229 0x9d, 0x62, 0x79, 0x58, 0x52, 0xe6, 0x65, 0xb7,
4230 0xab, 0x55, 0x67, 0x9c, 0x89, 0x7c, 0x03, 0xb0,
4231 0x73, 0x59, 0xc5, 0x81, 0xf5, 0x18, 0x17, 0x5c,
4232 0x89, 0xf3, 0x78, 0x35, 0x44, 0x62, 0x78, 0x72,
4233 0xd0, 0x96, 0xeb, 0x31, 0xe7, 0x87, 0x77, 0x14,
4234 0x99, 0x51, 0xf2, 0x59, 0x26, 0x9e, 0xb5, 0xa6,
4235 0x45, 0xfe, 0x6e, 0xbd, 0x07, 0x4c, 0x94, 0x5a,
4236 0xa5, 0x7d, 0xfc, 0xf1, 0x2b, 0x77, 0xe2, 0xfe,
4237 0x17, 0xd4, 0x84, 0xa0, 0xac, 0xb5, 0xc7, 0xda,
4238 0xa9, 0x1a, 0xb6, 0xf3, 0x74, 0x11, 0xb4, 0x9d,
4239 0xfb, 0x79, 0x2e, 0x04, 0x2d, 0x50, 0x28, 0x83,
4240 0xbf, 0xc6, 0x52, 0xd3, 0x34, 0xd6, 0xe8, 0x7a,
4241 0xb6, 0xea, 0xe7, 0xa8, 0x6c, 0x15, 0x1e, 0x2c,
4242 0x57, 0xbc, 0x48, 0x4e, 0x5f, 0x5c, 0xb6, 0x92,
4243 0xd2, 0x49, 0x77, 0x81, 0x6d, 0x90, 0x70, 0xae,
4244 0x98, 0xa1, 0x03, 0x0d, 0x6b, 0xb9, 0x77, 0x14,
4245 0xf1, 0x4e, 0x23, 0xd3, 0xf8, 0x68, 0xbd, 0xc2,
4246 0xfe, 0x04, 0xb7, 0x5c, 0xc5, 0x17, 0x60, 0x8f,
4247 0x65, 0x54, 0xa4, 0x7a, 0x42, 0xdc, 0x18, 0x0d,
4248 0xb5, 0xcf, 0x0f, 0xd3, 0xc7, 0x91, 0x66, 0x1b,
4249 0x45, 0x42, 0x27, 0x75, 0x50, 0xe5, 0xee, 0xb8,
4250 0x7f, 0x33, 0x2c, 0xba, 0x4a, 0x92, 0x4d, 0x2c,
4251 0x3c, 0xe3, 0x0d, 0x80, 0x01, 0xba, 0x0d, 0x29,
4252 0xd8, 0x3c, 0xe9, 0x13, 0x16, 0x57, 0xe6, 0xea,
4253 0x94, 0x52, 0xe7, 0x00, 0x4d, 0x30, 0xb0, 0x0f,
4254 0x35, 0xb8, 0xb8, 0xa7, 0xb1, 0xb5, 0x3b, 0x44,
4255 0xe1, 0x2f, 0xfd, 0x88, 0xed, 0x43, 0xe7, 0x52,
4256 0x10, 0x93, 0xb3, 0x8a, 0x30, 0x6b, 0x0a, 0xf7,
4257 0x23, 0xc6, 0x50, 0x9d, 0x4a, 0xb0, 0xde, 0xc3,
4258 0xdc, 0x9b, 0x2f, 0x01, 0x56, 0x36, 0x09, 0xc5,
4259 0x2f, 0x6b, 0xfe, 0xf1, 0xd8, 0x27, 0x45, 0x03,
4260 0x30, 0x5e, 0x5c, 0x5b, 0xb4, 0x62, 0x0e, 0x1a,
4261 0xa9, 0x21, 0x2b, 0x92, 0x94, 0x87, 0x62, 0x57,
4262 0x4c, 0x10, 0x74, 0x1a, 0xf1, 0x0a, 0xc5, 0x84,
4263 0x3b, 0x9e, 0x72, 0x02, 0xd7, 0xcc, 0x09, 0x56,
4264 0xbd, 0x54, 0xc1, 0xf0, 0xc3, 0xe3, 0xb3, 0xf8,
4265 0xd2, 0x0d, 0x61, 0xcb, 0xef, 0xce, 0x0d, 0x05,
4266 0xb0, 0x98, 0xd9, 0x8e, 0x4f, 0xf9, 0xbc, 0x93,
4267 0xa6, 0xea, 0xc8, 0xcf, 0x10, 0x53, 0x4b, 0xf1,
4268 0xec, 0xfc, 0x89, 0xf9, 0x64, 0xb0, 0x22, 0xbf,
4269 0x9e, 0x55, 0x46, 0x9f, 0x7c, 0x50, 0x8e, 0x84,
4270 0x54, 0x20, 0x98, 0xd7, 0x6c, 0x40, 0x1e, 0xdb,
4271 0x69, 0x34, 0x78, 0x61, 0x24, 0x21, 0x9c, 0x8a,
4272 0xb3, 0x62, 0x31, 0x8b, 0x6e, 0xf5, 0x2a, 0x35,
4273 0x86, 0x13, 0xb1, 0x6c, 0x64, 0x2e, 0x41, 0xa5,
4274 0x05, 0xf2, 0x42, 0xba, 0xd2, 0x3a, 0x0d, 0x8e,
4275 0x8a, 0x59, 0x94, 0x3c, 0xcf, 0x36, 0x27, 0x82,
4276 0xc2, 0x45, 0xee, 0x58, 0xcd, 0x88, 0xb4, 0xec,
4277 0xde, 0xb2, 0x96, 0x0a, 0xaf, 0x38, 0x6f, 0x88,
4278 0xd7, 0xd8, 0xe1, 0xdf, 0xb9, 0x96, 0xa9, 0x0a,
4279 0xb1, 0x95, 0x28, 0x86, 0x20, 0xe9, 0x17, 0x49,
4280 0xa2, 0x29, 0x38, 0xaa, 0xa5, 0xe9, 0x6e, 0xf1,
4281 0x19, 0x27, 0xc0, 0xd5, 0x2a, 0x22, 0xc3, 0x0b,
4282 0xdb, 0x7c, 0x73, 0x10, 0xb9, 0xba, 0x89, 0x76,
4283 0x54, 0xae, 0x7d, 0x71, 0xb3, 0x93, 0xf6, 0x32,
4284 0xe6, 0x47, 0x43, 0x55, 0xac, 0xa0, 0x0d, 0xc2,
4285 0x93, 0x27, 0x4a, 0x8e, 0x0e, 0x74, 0x15, 0xc7,
4286 0x0b, 0x85, 0xd9, 0x0c, 0xa9, 0x30, 0x7a, 0x3e,
4287 0xea, 0x8f, 0x85, 0x6d, 0x3a, 0x12, 0x4f, 0x72,
4288 0x69, 0x58, 0x7a, 0x80, 0xbb, 0xb5, 0x97, 0xf3,
4289 0xcf, 0x70, 0xd2, 0x5d, 0xdd, 0x4d, 0x21, 0x79,
4290 0x54, 0x4d, 0xe4, 0x05, 0xe8, 0xbd, 0xc2, 0x62,
4291 0xb1, 0x3b, 0x77, 0x1c, 0xd6, 0x5c, 0xf3, 0xa0,
4292 0x79, 0x00, 0xa8, 0x6c, 0x29, 0xd9, 0x18, 0x24,
4293 0x36, 0xa2, 0x46, 0xc0, 0x96, 0x65, 0x7f, 0xbd,
4294 0x2a, 0xed, 0x36, 0x16, 0x0c, 0xaa, 0x9f, 0xf4,
4295 0xc5, 0xb4, 0xe2, 0x12, 0xed, 0x69, 0xed, 0x4f,
4296 0x26, 0x2c, 0x39, 0x52, 0x89, 0x98, 0xe7, 0x2c,
4297 0x99, 0xa4, 0x9e, 0xa3, 0x9b, 0x99, 0x46, 0x7a,
4298 0x3a, 0xdc, 0xa8, 0x59, 0xa3, 0xdb, 0xc3, 0x3b,
4299 0x95, 0x0d, 0x3b, 0x09, 0x6e, 0xee, 0x83, 0x5d,
4300 0x32, 0x4d, 0xed, 0xab, 0xfa, 0x98, 0x14, 0x4e,
4301 0xc3, 0x15, 0x45, 0x53, 0x61, 0xc4, 0x93, 0xbd,
4302 0x90, 0xf4, 0x99, 0x95, 0x4c, 0xe6, 0x76, 0x92,
4303 0x29, 0x90, 0x46, 0x30, 0x92, 0x69, 0x7d, 0x13,
4304 0xf2, 0xa5, 0xcd, 0x69, 0x49, 0x44, 0xb2, 0x0f,
4305 0x63, 0x40, 0x36, 0x5f, 0x09, 0xe2, 0x78, 0xf8,
4306 0x91, 0xe3, 0xe2, 0xfa, 0x10, 0xf7, 0xc8, 0x24,
4307 0xa8, 0x89, 0x32, 0x5c, 0x37, 0x25, 0x1d, 0xb2,
4308 0xea, 0x17, 0x8a, 0x0a, 0xa9, 0x64, 0xc3, 0x7c,
4309 0x3c, 0x7c, 0xbd, 0xc6, 0x79, 0x34, 0xe7, 0xe2,
4310 0x85, 0x8e, 0xbf, 0xf8, 0xde, 0x92, 0xa0, 0xae,
4311 0x20, 0xc4, 0xf6, 0xbb, 0x1f, 0x38, 0x19, 0x0e,
4312 0xe8, 0x79, 0x9c, 0xa1, 0x23, 0xe9, 0x54, 0x7e,
4313 0x37, 0x2f, 0xe2, 0x94, 0x32, 0xaf, 0xa0, 0x23,
4314 0x49, 0xe4, 0xc0, 0xb3, 0xac, 0x00, 0x8f, 0x36,
4315 0x05, 0xc4, 0xa6, 0x96, 0xec, 0x05, 0x98, 0x4f,
4316 0x96, 0x67, 0x57, 0x1f, 0x20, 0x86, 0x1b, 0x2d,
4317 0x69, 0xe4, 0x29, 0x93, 0x66, 0x5f, 0xaf, 0x6b,
4318 0x88, 0x26, 0x2c, 0x67, 0x02, 0x4b, 0x52, 0xd0,
4319 0x83, 0x7a, 0x43, 0x1f, 0xc0, 0x71, 0x15, 0x25,
4320 0x77, 0x65, 0x08, 0x60, 0x11, 0x76, 0x4c, 0x8d,
4321 0xed, 0xa9, 0x27, 0xc6, 0xb1, 0x2a, 0x2c, 0x6a,
4322 0x4a, 0x97, 0xf5, 0xc6, 0xb7, 0x70, 0x42, 0xd3,
4323 0x03, 0xd1, 0x24, 0x95, 0xec, 0x6d, 0xab, 0x38,
4324 0x72, 0xce, 0xe2, 0x8b, 0x33, 0xd7, 0x51, 0x09,
4325 0xdc, 0x45, 0xe0, 0x09, 0x96, 0x32, 0xf3, 0xc4,
4326 0x84, 0xdc, 0x73, 0x73, 0x2d, 0x1b, 0x11, 0x98,
4327 0xc5, 0x0e, 0x69, 0x28, 0x94, 0xc7, 0xb5, 0x4d,
4328 0xc8, 0x8a, 0xd0, 0xaa, 0x13, 0x2e, 0x18, 0x74,
4329 0xdd, 0xd1, 0x1e, 0xf3, 0x90, 0xe8, 0xfc, 0x9a,
4330 0x72, 0x4a, 0x0e, 0xd1, 0xe4, 0xfb, 0x0d, 0x96,
4331 0xd1, 0x0c, 0x79, 0x85, 0x1b, 0x1c, 0xfe, 0xe1,
4332 0x62, 0x8f, 0x7a, 0x73, 0x32, 0xab, 0xc8, 0x18,
4333 0x69, 0xe3, 0x34, 0x30, 0xdf, 0x13, 0xa6, 0xe5,
4334 0xe8, 0x0e, 0x67, 0x7f, 0x81, 0x11, 0xb4, 0x60,
4335 0xc7, 0xbd, 0x79, 0x65, 0x50, 0xdc, 0xc4, 0x5b,
4336 0xde, 0x39, 0xa4, 0x01, 0x72, 0x63, 0xf3, 0xd1,
4337 0x64, 0x4e, 0xdf, 0xfc, 0x27, 0x92, 0x37, 0x0d,
4338 0x57, 0xcd, 0x11, 0x4f, 0x11, 0x04, 0x8e, 0x1d,
4339 0x16, 0xf7, 0xcd, 0x92, 0x9a, 0x99, 0x30, 0x14,
4340 0xf1, 0x7c, 0x67, 0x1b, 0x1f, 0x41, 0x0b, 0xe8,
4341 0x32, 0xe8, 0xb8, 0xc1, 0x4f, 0x54, 0x86, 0x4f,
4342 0xe5, 0x79, 0x81, 0x73, 0xcd, 0x43, 0x59, 0x68,
4343 0x73, 0x02, 0x3b, 0x78, 0x21, 0x72, 0x43, 0x00,
4344 0x49, 0x17, 0xf7, 0x00, 0xaf, 0x68, 0x24, 0x53,
4345 0x05, 0x0a, 0xc3, 0x33, 0xe0, 0x33, 0x3f, 0x69,
4346 0xd2, 0x84, 0x2f, 0x0b, 0xed, 0xde, 0x04, 0xf4,
4347 0x11, 0x94, 0x13, 0x69, 0x51, 0x09, 0x28, 0xde,
4348 0x57, 0x5c, 0xef, 0xdc, 0x9a, 0x49, 0x1c, 0x17,
4349 0x97, 0xf3, 0x96, 0xc1, 0x7f, 0x5d, 0x2e, 0x7d,
4350 0x55, 0xb8, 0xb3, 0x02, 0x09, 0xb3, 0x1f, 0xe7,
4351 0xc9, 0x8d, 0xa3, 0x36, 0x34, 0x8a, 0x77, 0x13,
4352 0x30, 0x63, 0x4c, 0xa5, 0xcd, 0xc3, 0xe0, 0x7e,
4353 0x05, 0xa1, 0x7b, 0x0c, 0xcb, 0x74, 0x47, 0x31,
4354 0x62, 0x03, 0x43, 0xf1, 0x87, 0xb4, 0xb0, 0x85,
4355 0x87, 0x8e, 0x4b, 0x25, 0xc7, 0xcf, 0xae, 0x4b,
4356 0x36, 0x46, 0x3e, 0x62, 0xbc, 0x6f, 0xeb, 0x5f,
4357 0x73, 0xac, 0xe6, 0x07, 0xee, 0xc1, 0xa1, 0xd6,
4358 0xc4, 0xab, 0xc9, 0xd6, 0x89, 0x45, 0xe1, 0xf1,
4359 0x04, 0x4e, 0x1a, 0x6f, 0xbb, 0x4f, 0x3a, 0xa3,
4360 0xa0, 0xcb, 0xa3, 0x0a, 0xd8, 0x71, 0x35, 0x55,
4361 0xe4, 0xbc, 0x2e, 0x04, 0x06, 0xe6, 0xff, 0x5b,
4362 0x1c, 0xc0, 0x11, 0x7c, 0xc5, 0x17, 0xf3, 0x38,
4363 0xcf, 0xe9, 0xba, 0x0f, 0x0e, 0xef, 0x02, 0xc2,
4364 0x8d, 0xc6, 0xbc, 0x4b, 0x67, 0x20, 0x95, 0xd7,
4365 0x2c, 0x45, 0x5b, 0x86, 0x44, 0x8c, 0x6f, 0x2e,
4366 0x7e, 0x9f, 0x1c, 0x77, 0xba, 0x6b, 0x0e, 0xa3,
4367 0x69, 0xdc, 0xab, 0x24, 0x57, 0x60, 0x47, 0xc1,
4368 0xd1, 0xa5, 0x9d, 0x23, 0xe6, 0xb1, 0x37, 0xfe,
4369 0x93, 0xd2, 0x4c, 0x46, 0xf9, 0x0c, 0xc6, 0xfb,
4370 0xd6, 0x9d, 0x99, 0x69, 0xab, 0x7a, 0x07, 0x0c,
4371 0x65, 0xe7, 0xc4, 0x08, 0x96, 0xe2, 0xa5, 0x01,
4372 0x3f, 0x46, 0x07, 0x05, 0x7e, 0xe8, 0x9a, 0x90,
4373 0x50, 0xdc, 0xe9, 0x7a, 0xea, 0xa1, 0x39, 0x6e,
4374 0x66, 0xe4, 0x6f, 0xa5, 0x5f, 0xb2, 0xd9, 0x5b,
4375 0xf5, 0xdb, 0x2a, 0x32, 0xf0, 0x11, 0x6f, 0x7c,
4376 0x26, 0x10, 0x8f, 0x3d, 0x80, 0xe9, 0x58, 0xf7,
4377 0xe0, 0xa8, 0x57, 0xf8, 0xdb, 0x0e, 0xce, 0x99,
4378 0x63, 0x19, 0x3d, 0xd5, 0xec, 0x1b, 0x77, 0x69,
4379 0x98, 0xf6, 0xe4, 0x5f, 0x67, 0x17, 0x4b, 0x09,
4380 0x85, 0x62, 0x82, 0x70, 0x18, 0xe2, 0x9a, 0x78,
4381 0xe2, 0x62, 0xbd, 0xb4, 0xf1, 0x42, 0xc6, 0xfb,
4382 0x08, 0xd0, 0xbd, 0xeb, 0x4e, 0x09, 0xf2, 0xc8,
4383 0x1e, 0xdc, 0x3d, 0x32, 0x21, 0x56, 0x9c, 0x4f,
4384 0x35, 0xf3, 0x61, 0x06, 0x72, 0x84, 0xc4, 0x32,
4385 0xf2, 0xf1, 0xfa, 0x0b, 0x2f, 0xc3, 0xdb, 0x02,
4386 0x04, 0xc2, 0xde, 0x57, 0x64, 0x60, 0x8d, 0xcf,
4387 0xcb, 0x86, 0x5d, 0x97, 0x3e, 0xb1, 0x9c, 0x01,
4388 0xd6, 0x28, 0x8f, 0x99, 0xbc, 0x46, 0xeb, 0x05,
4389 0xaf, 0x7e, 0xb8, 0x21, 0x2a, 0x56, 0x85, 0x1c,
4390 0xb3, 0x71, 0xa0, 0xde, 0xca, 0x96, 0xf1, 0x78,
4391 0x49, 0xa2, 0x99, 0x81, 0x80, 0x5c, 0x01, 0xf5,
4392 0xa0, 0xa2, 0x56, 0x63, 0xe2, 0x70, 0x07, 0xa5,
4393 0x95, 0xd6, 0x85, 0xeb, 0x36, 0x9e, 0xa9, 0x51,
4394 0x66, 0x56, 0x5f, 0x1d, 0x02, 0x19, 0xe2, 0xf6,
4395 0x4f, 0x73, 0x38, 0x09, 0x75, 0x64, 0x48, 0xe0,
4396 0xf1, 0x7e, 0x0e, 0xe8, 0x9d, 0xf9, 0xed, 0x94,
4397 0xfe, 0x16, 0x26, 0x62, 0x49, 0x74, 0xf4, 0xb0,
4398 0xd4, 0xa9, 0x6c, 0xb0, 0xfd, 0x53, 0xe9, 0x81,
4399 0xe0, 0x7a, 0xbf, 0xcf, 0xb5, 0xc4, 0x01, 0x81,
4400 0x79, 0x99, 0x77, 0x01, 0x3b, 0xe9, 0xa2, 0xb6,
4401 0xe6, 0x6a, 0x8a, 0x9e, 0x56, 0x1c, 0x8d, 0x1e,
4402 0x8f, 0x06, 0x55, 0x2c, 0x6c, 0xdc, 0x92, 0x87,
4403 0x64, 0x3b, 0x4b, 0x19, 0xa1, 0x13, 0x64, 0x1d,
4404 0x4a, 0xe9, 0xc0, 0x00, 0xb8, 0x95, 0xef, 0x6b,
4405 0x1a, 0x86, 0x6d, 0x37, 0x52, 0x02, 0xc2, 0xe0,
4406 0xc8, 0xbb, 0x42, 0x0c, 0x02, 0x21, 0x4a, 0xc9,
4407 0xef, 0xa0, 0x54, 0xe4, 0x5e, 0x16, 0x53, 0x81,
4408 0x70, 0x62, 0x10, 0xaf, 0xde, 0xb8, 0xb5, 0xd3,
4409 0xe8, 0x5e, 0x6c, 0xc3, 0x8a, 0x3e, 0x18, 0x07,
4410 0xf2, 0x2f, 0x7d, 0xa7, 0xe1, 0x3d, 0x4e, 0xb4,
4411 0x26, 0xa7, 0xa3, 0x93, 0x86, 0xb2, 0x04, 0x1e,
4412 0x53, 0x5d, 0x86, 0xd6, 0xde, 0x65, 0xca, 0xe3,
4413 0x4e, 0xc1, 0xcf, 0xef, 0xc8, 0x70, 0x1b, 0x83,
4414 0x13, 0xdd, 0x18, 0x8b, 0x0d, 0x76, 0xd2, 0xf6,
4415 0x37, 0x7a, 0x93, 0x7a, 0x50, 0x11, 0x9f, 0x96,
4416 0x86, 0x25, 0xfd, 0xac, 0xdc, 0xbe, 0x18, 0x93,
4417 0x19, 0x6b, 0xec, 0x58, 0x4f, 0xb9, 0x75, 0xa7,
4418 0xdd, 0x3f, 0x2f, 0xec, 0xc8, 0x5a, 0x84, 0xab,
4419 0xd5, 0xe4, 0x8a, 0x07, 0xf6, 0x4d, 0x23, 0xd6,
4420 0x03, 0xfb, 0x03, 0x6a, 0xea, 0x66, 0xbf, 0xd4,
4421 0xb1, 0x34, 0xfb, 0x78, 0xe9, 0x55, 0xdc, 0x7c,
4422 0x3d, 0x9c, 0xe5, 0x9a, 0xac, 0xc3, 0x7a, 0x80,
4423 0x24, 0x6d, 0xa0, 0xef, 0x25, 0x7c, 0xb7, 0xea,
4424 0xce, 0x4d, 0x5f, 0x18, 0x60, 0xce, 0x87, 0x22,
4425 0x66, 0x2f, 0xd5, 0xdd, 0xdd, 0x02, 0x21, 0x75,
4426 0x82, 0xa0, 0x1f, 0x58, 0xc6, 0xd3, 0x62, 0xf7,
4427 0x32, 0xd8, 0xaf, 0x1e, 0x07, 0x77, 0x51, 0x96,
4428 0xd5, 0x6b, 0x1e, 0x7e, 0x80, 0x02, 0xe8, 0x67,
4429 0xea, 0x17, 0x0b, 0x10, 0xd2, 0x3f, 0x28, 0x25,
4430 0x4f, 0x05, 0x77, 0x02, 0x14, 0x69, 0xf0, 0x2c,
4431 0xbe, 0x0c, 0xf1, 0x74, 0x30, 0xd1, 0xb9, 0x9b,
4432 0xfc, 0x8c, 0xbb, 0x04, 0x16, 0xd9, 0xba, 0xc3,
4433 0xbc, 0x91, 0x8a, 0xc4, 0x30, 0xa4, 0xb0, 0x12,
4434 0x4c, 0x21, 0x87, 0xcb, 0xc9, 0x1d, 0x16, 0x96,
4435 0x07, 0x6f, 0x23, 0x54, 0xb9, 0x6f, 0x79, 0xe5,
4436 0x64, 0xc0, 0x64, 0xda, 0xb1, 0xae, 0xdd, 0x60,
4437 0x6c, 0x1a, 0x9d, 0xd3, 0x04, 0x8e, 0x45, 0xb0,
4438 0x92, 0x61, 0xd0, 0x48, 0x81, 0xed, 0x5e, 0x1d,
4439 0xa0, 0xc9, 0xa4, 0x33, 0xc7, 0x13, 0x51, 0x5d,
4440 0x7f, 0x83, 0x73, 0xb6, 0x70, 0x18, 0x65, 0x3e,
4441 0x2f, 0x0e, 0x7a, 0x12, 0x39, 0x98, 0xab, 0xd8,
4442 0x7e, 0x6f, 0xa3, 0xd1, 0xba, 0x56, 0xad, 0xbd,
4443 0xf0, 0x03, 0x01, 0x1c, 0x85, 0x35, 0x9f, 0xeb,
4444 0x19, 0x63, 0xa1, 0xaf, 0xfe, 0x2d, 0x35, 0x50,
4445 0x39, 0xa0, 0x65, 0x7c, 0x95, 0x7e, 0x6b, 0xfe,
4446 0xc1, 0xac, 0x07, 0x7c, 0x98, 0x4f, 0xbe, 0x57,
4447 0xa7, 0x22, 0xec, 0xe2, 0x7e, 0x29, 0x09, 0x53,
4448 0xe8, 0xbf, 0xb4, 0x7e, 0x3f, 0x8f, 0xfc, 0x14,
4449 0xce, 0x54, 0xf9, 0x18, 0x58, 0xb5, 0xff, 0x44,
4450 0x05, 0x9d, 0xce, 0x1b, 0xb6, 0x82, 0x23, 0xc8,
4451 0x2e, 0xbc, 0x69, 0xbb, 0x4a, 0x29, 0x0f, 0x65,
4452 0x94, 0xf0, 0x63, 0x06, 0x0e, 0xef, 0x8c, 0xbd,
4453 0xff, 0xfd, 0xb0, 0x21, 0x6e, 0x57, 0x05, 0x75,
4454 0xda, 0xd5, 0xc4, 0xeb, 0x8d, 0x32, 0xf7, 0x50,
4455 0xd3, 0x6f, 0x22, 0xed, 0x5f, 0x8e, 0xa2, 0x5b,
4456 0x80, 0x8c, 0xc8, 0x78, 0x40, 0x24, 0x4b, 0x89,
4457 0x30, 0xce, 0x7a, 0x97, 0x0e, 0xc4, 0xaf, 0xef,
4458 0x9b, 0xb4, 0xcd, 0x66, 0x74, 0x14, 0x04, 0x2b,
4459 0xf7, 0xce, 0x0b, 0x1c, 0x6e, 0xc2, 0x78, 0x8c,
4460 0xca, 0xc5, 0xd0, 0x1c, 0x95, 0x4a, 0x91, 0x2d,
4461 0xa7, 0x20, 0xeb, 0x86, 0x52, 0xb7, 0x67, 0xd8,
4462 0x0c, 0xd6, 0x04, 0x14, 0xde, 0x51, 0x74, 0x75,
4463 0xe7, 0x11, 0xb4, 0x87, 0xa3, 0x3d, 0x2d, 0xad,
4464 0x4f, 0xef, 0xa0, 0x0f, 0x70, 0x00, 0x6d, 0x13,
4465 0x19, 0x1d, 0x41, 0x50, 0xe9, 0xd8, 0xf0, 0x32,
4466 0x71, 0xbc, 0xd3, 0x11, 0xf2, 0xac, 0xbe, 0xaf,
4467 0x75, 0x46, 0x65, 0x4e, 0x07, 0x34, 0x37, 0xa3,
4468 0x89, 0xfe, 0x75, 0xd4, 0x70, 0x4c, 0xc6, 0x3f,
4469 0x69, 0x24, 0x0e, 0x38, 0x67, 0x43, 0x8c, 0xde,
4470 0x06, 0xb5, 0xb8, 0xe7, 0xc4, 0xf0, 0x41, 0x8f,
4471 0xf0, 0xbd, 0x2f, 0x0b, 0xb9, 0x18, 0xf8, 0xde,
4472 0x64, 0xb1, 0xdb, 0xee, 0x00, 0x50, 0x77, 0xe1,
4473 0xc7, 0xff, 0xa6, 0xfa, 0xdd, 0x70, 0xf4, 0xe3,
4474 0x93, 0xe9, 0x77, 0x35, 0x3d, 0x4b, 0x2f, 0x2b,
4475 0x6d, 0x55, 0xf0, 0xfc, 0x88, 0x54, 0x4e, 0x89,
4476 0xc1, 0x8a, 0x23, 0x31, 0x2d, 0x14, 0x2a, 0xb8,
4477 0x1b, 0x15, 0xdd, 0x9e, 0x6e, 0x7b, 0xda, 0x05,
4478 0x91, 0x7d, 0x62, 0x64, 0x96, 0x72, 0xde, 0xfc,
4479 0xc1, 0xec, 0xf0, 0x23, 0x51, 0x6f, 0xdb, 0x5b,
4480 0x1d, 0x08, 0x57, 0xce, 0x09, 0xb8, 0xf6, 0xcd,
4481 0x8d, 0x95, 0xf2, 0x20, 0xbf, 0x0f, 0x20, 0x57,
4482 0x98, 0x81, 0x84, 0x4f, 0x15, 0x5c, 0x76, 0xe7,
4483 0x3e, 0x0a, 0x3a, 0x6c, 0xc4, 0x8a, 0xbe, 0x78,
4484 0x74, 0x77, 0xc3, 0x09, 0x4b, 0x5d, 0x48, 0xe4,
4485 0xc8, 0xcb, 0x0b, 0xea, 0x17, 0x28, 0xcf, 0xcf,
4486 0x31, 0x32, 0x44, 0xa4, 0xe5, 0x0e, 0x1a, 0x98,
4487 0x94, 0xc4, 0xf0, 0xff, 0xae, 0x3e, 0x44, 0xe8,
4488 0xa5, 0xb3, 0xb5, 0x37, 0x2f, 0xe8, 0xaf, 0x6f,
4489 0x28, 0xc1, 0x37, 0x5f, 0x31, 0xd2, 0xb9, 0x33,
4490 0xb1, 0xb2, 0x52, 0x94, 0x75, 0x2c, 0x29, 0x59,
4491 0x06, 0xc2, 0x25, 0xe8, 0x71, 0x65, 0x4e, 0xed,
4492 0xc0, 0x9c, 0xb1, 0xbb, 0x25, 0xdc, 0x6c, 0xe7,
4493 0x4b, 0xa5, 0x7a, 0x54, 0x7a, 0x60, 0xff, 0x7a,
4494 0xe0, 0x50, 0x40, 0x96, 0x35, 0x63, 0xe4, 0x0b,
4495 0x76, 0xbd, 0xa4, 0x65, 0x00, 0x1b, 0x57, 0x88,
4496 0xae, 0xed, 0x39, 0x88, 0x42, 0x11, 0x3c, 0xed,
4497 0x85, 0x67, 0x7d, 0xb9, 0x68, 0x82, 0xe9, 0x43,
4498 0x3c, 0x47, 0x53, 0xfa, 0xe8, 0xf8, 0x9f, 0x1f,
4499 0x9f, 0xef, 0x0f, 0xf7, 0x30, 0xd9, 0x30, 0x0e,
4500 0xb9, 0x9f, 0x69, 0x18, 0x2f, 0x7e, 0xf8, 0xf8,
4501 0xf8, 0x8c, 0x0f, 0xd4, 0x02, 0x4d, 0xea, 0xcd,
4502 0x0a, 0x9c, 0x6f, 0x71, 0x6d, 0x5a, 0x4c, 0x60,
4503 0xce, 0x20, 0x56, 0x32, 0xc6, 0xc5, 0x99, 0x1f,
4504 0x09, 0xe6, 0x4e, 0x18, 0x1a, 0x15, 0x13, 0xa8,
4505 0x7d, 0xb1, 0x6b, 0xc0, 0xb2, 0x6d, 0xf8, 0x26,
4506 0x66, 0xf8, 0x3d, 0x18, 0x74, 0x70, 0x66, 0x7a,
4507 0x34, 0x17, 0xde, 0xba, 0x47, 0xf1, 0x06, 0x18,
4508 0xcb, 0xaf, 0xeb, 0x4a, 0x1e, 0x8f, 0xa7, 0x77,
4509 0xe0, 0x3b, 0x78, 0x62, 0x66, 0xc9, 0x10, 0xea,
4510 0x1f, 0xb7, 0x29, 0x0a, 0x45, 0xa1, 0x1d, 0x1e,
4511 0x1d, 0xe2, 0x65, 0x61, 0x50, 0x9c, 0xd7, 0x05,
4512 0xf2, 0x0b, 0x5b, 0x12, 0x61, 0x02, 0xc8, 0xe5,
4513 0x63, 0x4f, 0x20, 0x0c, 0x07, 0x17, 0x33, 0x5e,
4514 0x03, 0x9a, 0x53, 0x0f, 0x2e, 0x55, 0xfe, 0x50,
4515 0x43, 0x7d, 0xd0, 0xb6, 0x7e, 0x5a, 0xda, 0xae,
4516 0x58, 0xef, 0x15, 0xa9, 0x83, 0xd9, 0x46, 0xb1,
4517 0x42, 0xaa, 0xf5, 0x02, 0x6c, 0xce, 0x92, 0x06,
4518 0x1b, 0xdb, 0x66, 0x45, 0x91, 0x79, 0xc2, 0x2d,
4519 0xe6, 0x53, 0xd3, 0x14, 0xfd, 0xbb, 0x44, 0x63,
4520 0xc6, 0xd7, 0x3d, 0x7a, 0x0c, 0x75, 0x78, 0x9d,
4521 0x5c, 0xa6, 0x39, 0xb3, 0xe5, 0x63, 0xca, 0x8b,
4522 0xfe, 0xd3, 0xef, 0x60, 0x83, 0xf6, 0x8e, 0x70,
4523 0xb6, 0x67, 0xc7, 0x77, 0xed, 0x23, 0xef, 0x4c,
4524 0xf0, 0xed, 0x2d, 0x07, 0x59, 0x6f, 0xc1, 0x01,
4525 0x34, 0x37, 0x08, 0xab, 0xd9, 0x1f, 0x09, 0xb1,
4526 0xce, 0x5b, 0x17, 0xff, 0x74, 0xf8, 0x9c, 0xd5,
4527 0x2c, 0x56, 0x39, 0x79, 0x0f, 0x69, 0x44, 0x75,
4528 0x58, 0x27, 0x01, 0xc4, 0xbf, 0xa7, 0xa1, 0x1d,
4529 0x90, 0x17, 0x77, 0x86, 0x5a, 0x3f, 0xd9, 0xd1,
4530 0x0e, 0xa0, 0x10, 0xf8, 0xec, 0x1e, 0xa5, 0x7f,
4531 0x5e, 0x36, 0xd1, 0xe3, 0x04, 0x2c, 0x70, 0xf7,
4532 0x8e, 0xc0, 0x98, 0x2f, 0x6c, 0x94, 0x2b, 0x41,
4533 0xb7, 0x60, 0x00, 0xb7, 0x2e, 0xb8, 0x02, 0x8d,
4534 0xb8, 0xb0, 0xd3, 0x86, 0xba, 0x1d, 0xd7, 0x90,
4535 0xd6, 0xb6, 0xe1, 0xfc, 0xd7, 0xd8, 0x28, 0x06,
4536 0x63, 0x9b, 0xce, 0x61, 0x24, 0x79, 0xc0, 0x70,
4537 0x52, 0xd0, 0xb6, 0xd4, 0x28, 0x95, 0x24, 0x87,
4538 0x03, 0x1f, 0xb7, 0x9a, 0xda, 0xa3, 0xfb, 0x52,
4539 0x5b, 0x68, 0xe7, 0x4c, 0x8c, 0x24, 0xe1, 0x42,
4540 0xf7, 0xd5, 0xfd, 0xad, 0x06, 0x32, 0x9f, 0xba,
4541 0xc1, 0xfc, 0xdd, 0xc6, 0xfc, 0xfc, 0xb3, 0x38,
4542 0x74, 0x56, 0x58, 0x40, 0x02, 0x37, 0x52, 0x2c,
4543 0x55, 0xcc, 0xb3, 0x9e, 0x7a, 0xe9, 0xd4, 0x38,
4544 0x41, 0x5e, 0x0c, 0x35, 0xe2, 0x11, 0xd1, 0x13,
4545 0xf8, 0xb7, 0x8d, 0x72, 0x6b, 0x22, 0x2a, 0xb0,
4546 0xdb, 0x08, 0xba, 0x35, 0xb9, 0x3f, 0xc8, 0xd3,
4547 0x24, 0x90, 0xec, 0x58, 0xd2, 0x09, 0xc7, 0x2d,
4548 0xed, 0x38, 0x80, 0x36, 0x72, 0x43, 0x27, 0x49,
4549 0x4a, 0x80, 0x8a, 0xa2, 0xe8, 0xd3, 0xda, 0x30,
4550 0x7d, 0xb6, 0x82, 0x37, 0x86, 0x92, 0x86, 0x3e,
4551 0x08, 0xb2, 0x28, 0x5a, 0x55, 0x44, 0x24, 0x7d,
4552 0x40, 0x48, 0x8a, 0xb6, 0x89, 0x58, 0x08, 0xa0,
4553 0xd6, 0x6d, 0x3a, 0x17, 0xbf, 0xf6, 0x54, 0xa2,
4554 0xf5, 0xd3, 0x8c, 0x0f, 0x78, 0x12, 0x57, 0x8b,
4555 0xd5, 0xc2, 0xfd, 0x58, 0x5b, 0x7f, 0x38, 0xe3,
4556 0xcc, 0xb7, 0x7c, 0x48, 0xb3, 0x20, 0xe8, 0x81,
4557 0x14, 0x32, 0x45, 0x05, 0xe0, 0xdb, 0x9f, 0x75,
4558 0x85, 0xb4, 0x6a, 0xfc, 0x95, 0xe3, 0x54, 0x22,
4559 0x12, 0xee, 0x30, 0xfe, 0xd8, 0x30, 0xef, 0x34,
4560 0x50, 0xab, 0x46, 0x30, 0x98, 0x2f, 0xb7, 0xc0,
4561 0x15, 0xa2, 0x83, 0xb6, 0xf2, 0x06, 0x21, 0xa2,
4562 0xc3, 0x26, 0x37, 0x14, 0xd1, 0x4d, 0xb5, 0x10,
4563 0x52, 0x76, 0x4d, 0x6a, 0xee, 0xb5, 0x2b, 0x15,
4564 0xb7, 0xf9, 0x51, 0xe8, 0x2a, 0xaf, 0xc7, 0xfa,
4565 0x77, 0xaf, 0xb0, 0x05, 0x4d, 0xd1, 0x68, 0x8e,
4566 0x74, 0x05, 0x9f, 0x9d, 0x93, 0xa5, 0x3e, 0x7f,
4567 0x4e, 0x5f, 0x9d, 0xcb, 0x09, 0xc7, 0x83, 0xe3,
4568 0x02, 0x9d, 0x27, 0x1f, 0xef, 0x85, 0x05, 0x8d,
4569 0xec, 0x55, 0x88, 0x0f, 0x0d, 0x7c, 0x4c, 0xe8,
4570 0xa1, 0x75, 0xa0, 0xd8, 0x06, 0x47, 0x14, 0xef,
4571 0xaa, 0x61, 0xcf, 0x26, 0x15, 0xad, 0xd8, 0xa3,
4572 0xaa, 0x75, 0xf2, 0x78, 0x4a, 0x5a, 0x61, 0xdf,
4573 0x8b, 0xc7, 0x04, 0xbc, 0xb2, 0x32, 0xd2, 0x7e,
4574 0x42, 0xee, 0xb4, 0x2f, 0x51, 0xff, 0x7b, 0x2e,
4575 0xd3, 0x02, 0xe8, 0xdc, 0x5d, 0x0d, 0x50, 0xdc,
4576 0xae, 0xb7, 0x46, 0xf9, 0xa8, 0xe6, 0xd0, 0x16,
4577 0xcc, 0xe6, 0x2c, 0x81, 0xc7, 0xad, 0xe9, 0xf0,
4578 0x05, 0x72, 0x6d, 0x3d, 0x0a, 0x7a, 0xa9, 0x02,
4579 0xac, 0x82, 0x93, 0x6e, 0xb6, 0x1c, 0x28, 0xfc,
4580 0x44, 0x12, 0xfb, 0x73, 0x77, 0xd4, 0x13, 0x39,
4581 0x29, 0x88, 0x8a, 0xf3, 0x5c, 0xa6, 0x36, 0xa0,
4582 0x2a, 0xed, 0x7e, 0xb1, 0x1d, 0xd6, 0x4c, 0x6b,
4583 0x41, 0x01, 0x18, 0x5d, 0x5d, 0x07, 0x97, 0xa6,
4584 0x4b, 0xef, 0x31, 0x18, 0xea, 0xac, 0xb1, 0x84,
4585 0x21, 0xed, 0xda, 0x86,
4586 },
4587 .rlen = 4100,
4588 },
4589};
4590
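The CTR vectors above and the decryption vectors that follow all have klen equal to the AES key length plus 4 (20, 28, 36), which matches the RFC 3686 convention: the last four key bytes are the nonce, and each 16-byte counter block is nonce || 64-bit per-packet IV || 32-bit big-endian block counter starting at 1. Below is a minimal user-space sketch (not kernel code; the helper name and layout are illustrative only) of how such a counter block would be assembled from a vector's .key and .iv fields, using the key of the first RFC 3686 vector that appears in the decryption template below.

/*
 * Illustrative sketch only: build the initial RFC 3686 counter block
 * from a test vector's key/iv.  The 4-byte nonce is the tail of .key
 * (klen = AES key length + 4); the block counter is big-endian and
 * starts at 1.  Function and variable names are hypothetical.
 */
#include <stdio.h>
#include <string.h>
#include <stdint.h>

static void rfc3686_ctrblk(uint8_t out[16], const uint8_t *key,
                           unsigned int klen, const uint8_t iv[8],
                           uint32_t blk)
{
        memcpy(out, key + klen - 4, 4);         /* nonce           */
        memcpy(out + 4, iv, 8);                 /* per-packet IV   */
        out[12] = blk >> 24;                    /* counter, MSB... */
        out[13] = blk >> 16;
        out[14] = blk >> 8;
        out[15] = blk;                          /* ...to LSB       */
}

int main(void)
{
        /* key/iv of the first RFC 3686 vector (klen = 20) */
        static const uint8_t key[20] = {
                0xae, 0x68, 0x52, 0xf8, 0x12, 0x10, 0x67, 0xcc,
                0x4b, 0xf7, 0xa5, 0x76, 0x55, 0x77, 0xf3, 0x9e,
                0x00, 0x00, 0x00, 0x30
        };
        static const uint8_t iv[8] = { 0 };
        uint8_t ctr[16];
        int i;

        rfc3686_ctrblk(ctr, key, sizeof(key), iv, 1);
        for (i = 0; i < 16; i++)
                printf("%02x ", ctr[i]);
        printf("\n");   /* 00 00 00 30 00 00 00 00 00 00 00 00 00 00 00 01 */
        return 0;
}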
4591static struct cipher_testvec aes_ctr_dec_tv_template[] = {
4592 { /* From RFC 3686 */
4593 .key = { 0xae, 0x68, 0x52, 0xf8, 0x12, 0x10, 0x67, 0xcc,
4594 0x4b, 0xf7, 0xa5, 0x76, 0x55, 0x77, 0xf3, 0x9e,
4595 0x00, 0x00, 0x00, 0x30 },
4596 .klen = 20,
4597 .iv = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
4598 .input = { 0xe4, 0x09, 0x5d, 0x4f, 0xb7, 0xa7, 0xb3, 0x79,
4599 0x2d, 0x61, 0x75, 0xa3, 0x26, 0x13, 0x11, 0xb8 },
4600 .ilen = 16,
4601 .result = { "Single block msg" },
4602 .rlen = 16,
4603 }, {
4604 .key = { 0x7e, 0x24, 0x06, 0x78, 0x17, 0xfa, 0xe0, 0xd7,
4605 0x43, 0xd6, 0xce, 0x1f, 0x32, 0x53, 0x91, 0x63,
4606 0x00, 0x6c, 0xb6, 0xdb },
4607 .klen = 20,
4608 .iv = { 0xc0, 0x54, 0x3b, 0x59, 0xda, 0x48, 0xd9, 0x0b },
4609 .input = { 0x51, 0x04, 0xa1, 0x06, 0x16, 0x8a, 0x72, 0xd9,
4610 0x79, 0x0d, 0x41, 0xee, 0x8e, 0xda, 0xd3, 0x88,
4611 0xeb, 0x2e, 0x1e, 0xfc, 0x46, 0xda, 0x57, 0xc8,
4612 0xfc, 0xe6, 0x30, 0xdf, 0x91, 0x41, 0xbe, 0x28 },
4613 .ilen = 32,
4614 .result = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
4615 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
4616 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
4617 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f },
4618 .rlen = 32,
4619 }, {
4620 .key = { 0x16, 0xaf, 0x5b, 0x14, 0x5f, 0xc9, 0xf5, 0x79,
4621 0xc1, 0x75, 0xf9, 0x3e, 0x3b, 0xfb, 0x0e, 0xed,
4622 0x86, 0x3d, 0x06, 0xcc, 0xfd, 0xb7, 0x85, 0x15,
4623 0x00, 0x00, 0x00, 0x48 },
4624 .klen = 28,
4625 .iv = { 0x36, 0x73, 0x3c, 0x14, 0x7d, 0x6d, 0x93, 0xcb },
4626 .input = { 0x4b, 0x55, 0x38, 0x4f, 0xe2, 0x59, 0xc9, 0xc8,
4627 0x4e, 0x79, 0x35, 0xa0, 0x03, 0xcb, 0xe9, 0x28 },
4628 .ilen = 16,
4629 .result = { "Single block msg" },
4630 .rlen = 16,
4631 }, {
4632 .key = { 0x7c, 0x5c, 0xb2, 0x40, 0x1b, 0x3d, 0xc3, 0x3c,
4633 0x19, 0xe7, 0x34, 0x08, 0x19, 0xe0, 0xf6, 0x9c,
4634 0x67, 0x8c, 0x3d, 0xb8, 0xe6, 0xf6, 0xa9, 0x1a,
4635 0x00, 0x96, 0xb0, 0x3b },
4636 .klen = 28,
4637 .iv = { 0x02, 0x0c, 0x6e, 0xad, 0xc2, 0xcb, 0x50, 0x0d },
4638 .input = { 0x45, 0x32, 0x43, 0xfc, 0x60, 0x9b, 0x23, 0x32,
4639 0x7e, 0xdf, 0xaa, 0xfa, 0x71, 0x31, 0xcd, 0x9f,
4640 0x84, 0x90, 0x70, 0x1c, 0x5a, 0xd4, 0xa7, 0x9c,
4641 0xfc, 0x1f, 0xe0, 0xff, 0x42, 0xf4, 0xfb, 0x00 },
4642 .ilen = 32,
4643 .result = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
4644 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
4645 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
4646 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f },
4647 .rlen = 32,
4648 }, {
4649 .key = { 0x77, 0x6b, 0xef, 0xf2, 0x85, 0x1d, 0xb0, 0x6f,
4650 0x4c, 0x8a, 0x05, 0x42, 0xc8, 0x69, 0x6f, 0x6c,
4651 0x6a, 0x81, 0xaf, 0x1e, 0xec, 0x96, 0xb4, 0xd3,
4652 0x7f, 0xc1, 0xd6, 0x89, 0xe6, 0xc1, 0xc1, 0x04,
4653 0x00, 0x00, 0x00, 0x60 },
4654 .klen = 36,
4655 .iv = { 0xdb, 0x56, 0x72, 0xc9, 0x7a, 0xa8, 0xf0, 0xb2 },
4656 .input = { 0x14, 0x5a, 0xd0, 0x1d, 0xbf, 0x82, 0x4e, 0xc7,
4657 0x56, 0x08, 0x63, 0xdc, 0x71, 0xe3, 0xe0, 0xc0 },
4658 .ilen = 16,
4659 .result = { "Single block msg" },
4660 .rlen = 16,
4661 }, {
4662 .key = { 0xf6, 0xd6, 0x6d, 0x6b, 0xd5, 0x2d, 0x59, 0xbb,
4663 0x07, 0x96, 0x36, 0x58, 0x79, 0xef, 0xf8, 0x86,
4664 0xc6, 0x6d, 0xd5, 0x1a, 0x5b, 0x6a, 0x99, 0x74,
4665 0x4b, 0x50, 0x59, 0x0c, 0x87, 0xa2, 0x38, 0x84,
4666 0x00, 0xfa, 0xac, 0x24 },
4667 .klen = 36,
4668 .iv = { 0xc1, 0x58, 0x5e, 0xf1, 0x5a, 0x43, 0xd8, 0x75 },
4669 .input = { 0xf0, 0x5e, 0x23, 0x1b, 0x38, 0x94, 0x61, 0x2c,
4670 0x49, 0xee, 0x00, 0x0b, 0x80, 0x4e, 0xb2, 0xa9,
4671 0xb8, 0x30, 0x6b, 0x50, 0x8f, 0x83, 0x9d, 0x6a,
4672 0x55, 0x30, 0x83, 0x1d, 0x93, 0x44, 0xaf, 0x1c },
4673 .ilen = 32,
4674 .result = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
4675 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
4676 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
4677 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f },
4678 .rlen = 32,
4679 },
4680};
4681
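The GCM entries that follow use struct aead_testvec and lean on C designated-initializer semantics: any member not named in an entry is zero-initialized, so the first entries (which give only .klen, .result and .rlen) exercise an all-zero key and IV with empty or all-zero plaintext, as in the opening test cases of the McGrew & Viega paper. A small sketch of that initialization rule; the struct here is a cut-down, hypothetical stand-in, not the real aead_testvec.

/*
 * Sketch only: members omitted from a designated initializer are
 * zero-initialized, which is why an entry can specify just .klen and
 * .result and still mean "all-zero key, all-zero IV, empty plaintext".
 */
#include <stdio.h>

struct mini_aead_tv {
        unsigned char key[32];
        unsigned char iv[12];
        unsigned char klen;
        unsigned short ilen, rlen;
};

static struct mini_aead_tv tv[] = {
        { .klen = 16, .rlen = 16 },     /* key[] and iv[] are all zero */
};

int main(void)
{
        printf("key[0]=%02x iv[0]=%02x ilen=%u\n",
               tv[0].key[0], tv[0].iv[0], (unsigned)tv[0].ilen);
        /* prints: key[0]=00 iv[0]=00 ilen=0 */
        return 0;
}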
4682static struct aead_testvec aes_gcm_enc_tv_template[] = {
4683 { /* From McGrew & Viega - http://citeseer.ist.psu.edu/656989.html */
4684 .klen = 16,
4685 .result = { 0x58, 0xe2, 0xfc, 0xce, 0xfa, 0x7e, 0x30, 0x61,
4686 0x36, 0x7f, 0x1d, 0x57, 0xa4, 0xe7, 0x45, 0x5a },
4687 .rlen = 16,
4688 }, {
4689 .klen = 16,
4690 .ilen = 16,
4691 .result = { 0x03, 0x88, 0xda, 0xce, 0x60, 0xb6, 0xa3, 0x92,
4692 0xf3, 0x28, 0xc2, 0xb9, 0x71, 0xb2, 0xfe, 0x78,
4693 0xab, 0x6e, 0x47, 0xd4, 0x2c, 0xec, 0x13, 0xbd,
4694 0xf5, 0x3a, 0x67, 0xb2, 0x12, 0x57, 0xbd, 0xdf },
4695 .rlen = 32,
4696 }, {
4697 .key = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
4698 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08 },
4699 .klen = 16,
4700 .iv = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
4701 0xde, 0xca, 0xf8, 0x88 },
4702 .input = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
4703 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
4704 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
4705 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
4706 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
4707 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
4708 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
4709 0xba, 0x63, 0x7b, 0x39, 0x1a, 0xaf, 0xd2, 0x55 },
4710 .ilen = 64,
4711 .result = { 0x42, 0x83, 0x1e, 0xc2, 0x21, 0x77, 0x74, 0x24,
4712 0x4b, 0x72, 0x21, 0xb7, 0x84, 0xd0, 0xd4, 0x9c,
4713 0xe3, 0xaa, 0x21, 0x2f, 0x2c, 0x02, 0xa4, 0xe0,
4714 0x35, 0xc1, 0x7e, 0x23, 0x29, 0xac, 0xa1, 0x2e,
4715 0x21, 0xd5, 0x14, 0xb2, 0x54, 0x66, 0x93, 0x1c,
4716 0x7d, 0x8f, 0x6a, 0x5a, 0xac, 0x84, 0xaa, 0x05,
4717 0x1b, 0xa3, 0x0b, 0x39, 0x6a, 0x0a, 0xac, 0x97,
4718 0x3d, 0x58, 0xe0, 0x91, 0x47, 0x3f, 0x59, 0x85,
4719 0x4d, 0x5c, 0x2a, 0xf3, 0x27, 0xcd, 0x64, 0xa6,
4720 0x2c, 0xf3, 0x5a, 0xbd, 0x2b, 0xa6, 0xfa, 0xb4 },
4721 .rlen = 80,
4722 }, {
4723 .key = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
4724 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08 },
4725 .klen = 16,
4726 .iv = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
4727 0xde, 0xca, 0xf8, 0x88 },
4728 .input = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
4729 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
4730 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
4731 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
4732 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
4733 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
4734 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
4735 0xba, 0x63, 0x7b, 0x39 },
4736 .ilen = 60,
4737 .assoc = { 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
4738 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
4739 0xab, 0xad, 0xda, 0xd2 },
4740 .alen = 20,
4741 .result = { 0x42, 0x83, 0x1e, 0xc2, 0x21, 0x77, 0x74, 0x24,
4742 0x4b, 0x72, 0x21, 0xb7, 0x84, 0xd0, 0xd4, 0x9c,
4743 0xe3, 0xaa, 0x21, 0x2f, 0x2c, 0x02, 0xa4, 0xe0,
4744 0x35, 0xc1, 0x7e, 0x23, 0x29, 0xac, 0xa1, 0x2e,
4745 0x21, 0xd5, 0x14, 0xb2, 0x54, 0x66, 0x93, 0x1c,
4746 0x7d, 0x8f, 0x6a, 0x5a, 0xac, 0x84, 0xaa, 0x05,
4747 0x1b, 0xa3, 0x0b, 0x39, 0x6a, 0x0a, 0xac, 0x97,
4748 0x3d, 0x58, 0xe0, 0x91,
4749 0x5b, 0xc9, 0x4f, 0xbc, 0x32, 0x21, 0xa5, 0xdb,
4750 0x94, 0xfa, 0xe9, 0x5a, 0xe7, 0x12, 0x1a, 0x47 },
4751 .rlen = 76,
4752 }, {
4753 .klen = 24,
4754 .result = { 0xcd, 0x33, 0xb2, 0x8a, 0xc7, 0x73, 0xf7, 0x4b,
4755 0xa0, 0x0e, 0xd1, 0xf3, 0x12, 0x57, 0x24, 0x35 },
4756 .rlen = 16,
4757 }, {
4758 .klen = 24,
4759 .ilen = 16,
4760 .result = { 0x98, 0xe7, 0x24, 0x7c, 0x07, 0xf0, 0xfe, 0x41,
4761 0x1c, 0x26, 0x7e, 0x43, 0x84, 0xb0, 0xf6, 0x00,
4762 0x2f, 0xf5, 0x8d, 0x80, 0x03, 0x39, 0x27, 0xab,
4763 0x8e, 0xf4, 0xd4, 0x58, 0x75, 0x14, 0xf0, 0xfb },
4764 .rlen = 32,
4765 }, {
4766 .key = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
4767 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08,
4768 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c },
4769 .klen = 24,
4770 .iv = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
4771 0xde, 0xca, 0xf8, 0x88 },
4772 .input = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
4773 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
4774 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
4775 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
4776 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
4777 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
4778 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
4779 0xba, 0x63, 0x7b, 0x39, 0x1a, 0xaf, 0xd2, 0x55 },
4780 .ilen = 64,
4781 .result = { 0x39, 0x80, 0xca, 0x0b, 0x3c, 0x00, 0xe8, 0x41,
4782 0xeb, 0x06, 0xfa, 0xc4, 0x87, 0x2a, 0x27, 0x57,
4783 0x85, 0x9e, 0x1c, 0xea, 0xa6, 0xef, 0xd9, 0x84,
4784 0x62, 0x85, 0x93, 0xb4, 0x0c, 0xa1, 0xe1, 0x9c,
4785 0x7d, 0x77, 0x3d, 0x00, 0xc1, 0x44, 0xc5, 0x25,
4786 0xac, 0x61, 0x9d, 0x18, 0xc8, 0x4a, 0x3f, 0x47,
4787 0x18, 0xe2, 0x44, 0x8b, 0x2f, 0xe3, 0x24, 0xd9,
4788 0xcc, 0xda, 0x27, 0x10, 0xac, 0xad, 0xe2, 0x56,
4789 0x99, 0x24, 0xa7, 0xc8, 0x58, 0x73, 0x36, 0xbf,
4790 0xb1, 0x18, 0x02, 0x4d, 0xb8, 0x67, 0x4a, 0x14 },
4791 .rlen = 80,
4792 }, {
4793 .key = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
4794 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08,
4795 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c },
4796 .klen = 24,
4797 .iv = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
4798 0xde, 0xca, 0xf8, 0x88 },
4799 .input = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
4800 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
4801 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
4802 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
4803 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
4804 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
4805 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
4806 0xba, 0x63, 0x7b, 0x39 },
4807 .ilen = 60,
4808 .assoc = { 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
4809 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
4810 0xab, 0xad, 0xda, 0xd2 },
4811 .alen = 20,
4812 .result = { 0x39, 0x80, 0xca, 0x0b, 0x3c, 0x00, 0xe8, 0x41,
4813 0xeb, 0x06, 0xfa, 0xc4, 0x87, 0x2a, 0x27, 0x57,
4814 0x85, 0x9e, 0x1c, 0xea, 0xa6, 0xef, 0xd9, 0x84,
4815 0x62, 0x85, 0x93, 0xb4, 0x0c, 0xa1, 0xe1, 0x9c,
4816 0x7d, 0x77, 0x3d, 0x00, 0xc1, 0x44, 0xc5, 0x25,
4817 0xac, 0x61, 0x9d, 0x18, 0xc8, 0x4a, 0x3f, 0x47,
4818 0x18, 0xe2, 0x44, 0x8b, 0x2f, 0xe3, 0x24, 0xd9,
4819 0xcc, 0xda, 0x27, 0x10,
4820 0x25, 0x19, 0x49, 0x8e, 0x80, 0xf1, 0x47, 0x8f,
4821 0x37, 0xba, 0x55, 0xbd, 0x6d, 0x27, 0x61, 0x8c },
4822 .rlen = 76,
4823 .np = 2,
4824 .tap = { 32, 28 },
4825 .anp = 2,
4826 .atap = { 8, 12 }
4827 }, {
4828 .klen = 32,
4829 .result = { 0x53, 0x0f, 0x8a, 0xfb, 0xc7, 0x45, 0x36, 0xb9,
4830 0xa9, 0x63, 0xb4, 0xf1, 0xc4, 0xcb, 0x73, 0x8b },
4831 .rlen = 16,
4832 }
4833};
4834
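The encryption vectors above follow the aead_testvec layout: .result holds the ciphertext followed by the 16-byte GCM tag, so .rlen is always .ilen plus 16. As a quick illustration of how one of them can be reproduced outside the kernel, the sketch below re-derives the second vector (all-zero 128-bit key, all-zero 96-bit IV, sixteen zero plaintext bytes) with OpenSSL's EVP interface. This is only a hedged user-space cross-check, not part of tcrypt itself; the file name and build line are our own.

/* gcm_check.c: illustrative only, build with `gcc gcm_check.c -lcrypto` */
#include <stdio.h>
#include <openssl/evp.h>

int main(void)
{
	/* Second GCM vector above: zero key, zero 96-bit IV, 16 zero bytes. */
	unsigned char key[16] = { 0 }, iv[12] = { 0 }, pt[16] = { 0 };
	unsigned char ct[16], tag[16];
	int len, i;
	EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();

	EVP_EncryptInit_ex(ctx, EVP_aes_128_gcm(), NULL, NULL, NULL);
	EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_GCM_SET_IVLEN, sizeof(iv), NULL);
	EVP_EncryptInit_ex(ctx, NULL, NULL, key, iv);
	EVP_EncryptUpdate(ctx, ct, &len, pt, sizeof(pt));
	EVP_EncryptFinal_ex(ctx, ct + len, &len);
	EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_GCM_GET_TAG, sizeof(tag), tag);
	EVP_CIPHER_CTX_free(ctx);

	/* Expect 0388dace...fe78 then ab6e47d4...bddf, i.e. the 32-byte
	 * .result of that vector (ciphertext || tag). */
	for (i = 0; i < 16; i++)
		printf("%02x", ct[i]);
	for (i = 0; i < 16; i++)
		printf("%02x", tag[i]);
	printf("\n");
	return 0;
}
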
4835static struct aead_testvec aes_gcm_dec_tv_template[] = {
4836 { /* From McGrew & Viega - http://citeseer.ist.psu.edu/656989.html */
4837 .klen = 32,
4838 .input = { 0xce, 0xa7, 0x40, 0x3d, 0x4d, 0x60, 0x6b, 0x6e,
4839 0x07, 0x4e, 0xc5, 0xd3, 0xba, 0xf3, 0x9d, 0x18,
4840 0xd0, 0xd1, 0xc8, 0xa7, 0x99, 0x99, 0x6b, 0xf0,
4841 0x26, 0x5b, 0x98, 0xb5, 0xd4, 0x8a, 0xb9, 0x19 },
4842 .ilen = 32,
4843 .rlen = 16,
4844 }, {
4845 .key = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
4846 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08,
4847 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
4848 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08 },
4849 .klen = 32,
4850 .iv = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
4851 0xde, 0xca, 0xf8, 0x88 },
4852 .input = { 0x52, 0x2d, 0xc1, 0xf0, 0x99, 0x56, 0x7d, 0x07,
4853 0xf4, 0x7f, 0x37, 0xa3, 0x2a, 0x84, 0x42, 0x7d,
4854 0x64, 0x3a, 0x8c, 0xdc, 0xbf, 0xe5, 0xc0, 0xc9,
4855 0x75, 0x98, 0xa2, 0xbd, 0x25, 0x55, 0xd1, 0xaa,
4856 0x8c, 0xb0, 0x8e, 0x48, 0x59, 0x0d, 0xbb, 0x3d,
4857 0xa7, 0xb0, 0x8b, 0x10, 0x56, 0x82, 0x88, 0x38,
4858 0xc5, 0xf6, 0x1e, 0x63, 0x93, 0xba, 0x7a, 0x0a,
4859 0xbc, 0xc9, 0xf6, 0x62, 0x89, 0x80, 0x15, 0xad,
4860 0xb0, 0x94, 0xda, 0xc5, 0xd9, 0x34, 0x71, 0xbd,
4861 0xec, 0x1a, 0x50, 0x22, 0x70, 0xe3, 0xcc, 0x6c },
4862 .ilen = 80,
4863 .result = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
4864 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
4865 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
4866 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
4867 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
4868 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
4869 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
4870 0xba, 0x63, 0x7b, 0x39, 0x1a, 0xaf, 0xd2, 0x55 },
4871 .rlen = 64,
4872 }, {
4873 .key = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
4874 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08,
4875 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
4876 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08 },
4877 .klen = 32,
4878 .iv = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
4879 0xde, 0xca, 0xf8, 0x88 },
4880 .input = { 0x52, 0x2d, 0xc1, 0xf0, 0x99, 0x56, 0x7d, 0x07,
4881 0xf4, 0x7f, 0x37, 0xa3, 0x2a, 0x84, 0x42, 0x7d,
4882 0x64, 0x3a, 0x8c, 0xdc, 0xbf, 0xe5, 0xc0, 0xc9,
4883 0x75, 0x98, 0xa2, 0xbd, 0x25, 0x55, 0xd1, 0xaa,
4884 0x8c, 0xb0, 0x8e, 0x48, 0x59, 0x0d, 0xbb, 0x3d,
4885 0xa7, 0xb0, 0x8b, 0x10, 0x56, 0x82, 0x88, 0x38,
4886 0xc5, 0xf6, 0x1e, 0x63, 0x93, 0xba, 0x7a, 0x0a,
4887 0xbc, 0xc9, 0xf6, 0x62,
4888 0x76, 0xfc, 0x6e, 0xce, 0x0f, 0x4e, 0x17, 0x68,
4889 0xcd, 0xdf, 0x88, 0x53, 0xbb, 0x2d, 0x55, 0x1b },
4890 .ilen = 76,
4891 .assoc = { 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
4892 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
4893 0xab, 0xad, 0xda, 0xd2 },
4894 .alen = 20,
4895 .result = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
4896 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
4897 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
4898 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
4899 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
4900 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
4901 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
4902 0xba, 0x63, 0x7b, 0x39 },
4903 .rlen = 60,
4904 .np = 2,
4905 .tap = { 48, 28 },
4906 .anp = 3,
4907 .atap = { 8, 8, 4 }
4908 }, {
4909 .key = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
4910 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08 },
4911 .klen = 16,
4912 .iv = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
4913 0xde, 0xca, 0xf8, 0x88 },
4914 .input = { 0x42, 0x83, 0x1e, 0xc2, 0x21, 0x77, 0x74, 0x24,
4915 0x4b, 0x72, 0x21, 0xb7, 0x84, 0xd0, 0xd4, 0x9c,
4916 0xe3, 0xaa, 0x21, 0x2f, 0x2c, 0x02, 0xa4, 0xe0,
4917 0x35, 0xc1, 0x7e, 0x23, 0x29, 0xac, 0xa1, 0x2e,
4918 0x21, 0xd5, 0x14, 0xb2, 0x54, 0x66, 0x93, 0x1c,
4919 0x7d, 0x8f, 0x6a, 0x5a, 0xac, 0x84, 0xaa, 0x05,
4920 0x1b, 0xa3, 0x0b, 0x39, 0x6a, 0x0a, 0xac, 0x97,
4921 0x3d, 0x58, 0xe0, 0x91, 0x47, 0x3f, 0x59, 0x85,
4922 0x4d, 0x5c, 0x2a, 0xf3, 0x27, 0xcd, 0x64, 0xa6,
4923 0x2c, 0xf3, 0x5a, 0xbd, 0x2b, 0xa6, 0xfa, 0xb4 },
4924 .ilen = 80,
4925 .result = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
4926 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
4927 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
4928 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
4929 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
4930 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
4931 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
4932 0xba, 0x63, 0x7b, 0x39, 0x1a, 0xaf, 0xd2, 0x55 },
4933 .rlen = 64,
4934 }, {
4935 .key = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
4936 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08 },
4937 .klen = 16,
4938 .iv = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
4939 0xde, 0xca, 0xf8, 0x88 },
4940 .input = { 0x42, 0x83, 0x1e, 0xc2, 0x21, 0x77, 0x74, 0x24,
4941 0x4b, 0x72, 0x21, 0xb7, 0x84, 0xd0, 0xd4, 0x9c,
4942 0xe3, 0xaa, 0x21, 0x2f, 0x2c, 0x02, 0xa4, 0xe0,
4943 0x35, 0xc1, 0x7e, 0x23, 0x29, 0xac, 0xa1, 0x2e,
4944 0x21, 0xd5, 0x14, 0xb2, 0x54, 0x66, 0x93, 0x1c,
4945 0x7d, 0x8f, 0x6a, 0x5a, 0xac, 0x84, 0xaa, 0x05,
4946 0x1b, 0xa3, 0x0b, 0x39, 0x6a, 0x0a, 0xac, 0x97,
4947 0x3d, 0x58, 0xe0, 0x91,
4948 0x5b, 0xc9, 0x4f, 0xbc, 0x32, 0x21, 0xa5, 0xdb,
4949 0x94, 0xfa, 0xe9, 0x5a, 0xe7, 0x12, 0x1a, 0x47 },
4950 .ilen = 76,
4951 .assoc = { 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
4952 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
4953 0xab, 0xad, 0xda, 0xd2 },
4954 .alen = 20,
4955 .result = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
4956 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
4957 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
4958 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
4959 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
4960 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
4961 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
4962 0xba, 0x63, 0x7b, 0x39 },
4963 .rlen = 60,
4964 }, {
4965 .klen = 24,
4966 .input = { 0x98, 0xe7, 0x24, 0x7c, 0x07, 0xf0, 0xfe, 0x41,
4967 0x1c, 0x26, 0x7e, 0x43, 0x84, 0xb0, 0xf6, 0x00,
4968 0x2f, 0xf5, 0x8d, 0x80, 0x03, 0x39, 0x27, 0xab,
4969 0x8e, 0xf4, 0xd4, 0x58, 0x75, 0x14, 0xf0, 0xfb },
4970 .ilen = 32,
4971 .rlen = 16,
4972 }, {
4973 .key = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
4974 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08,
4975 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c },
4976 .klen = 24,
4977 .iv = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
4978 0xde, 0xca, 0xf8, 0x88 },
4979 .input = { 0x39, 0x80, 0xca, 0x0b, 0x3c, 0x00, 0xe8, 0x41,
4980 0xeb, 0x06, 0xfa, 0xc4, 0x87, 0x2a, 0x27, 0x57,
4981 0x85, 0x9e, 0x1c, 0xea, 0xa6, 0xef, 0xd9, 0x84,
4982 0x62, 0x85, 0x93, 0xb4, 0x0c, 0xa1, 0xe1, 0x9c,
4983 0x7d, 0x77, 0x3d, 0x00, 0xc1, 0x44, 0xc5, 0x25,
4984 0xac, 0x61, 0x9d, 0x18, 0xc8, 0x4a, 0x3f, 0x47,
4985 0x18, 0xe2, 0x44, 0x8b, 0x2f, 0xe3, 0x24, 0xd9,
4986 0xcc, 0xda, 0x27, 0x10, 0xac, 0xad, 0xe2, 0x56,
4987 0x99, 0x24, 0xa7, 0xc8, 0x58, 0x73, 0x36, 0xbf,
4988 0xb1, 0x18, 0x02, 0x4d, 0xb8, 0x67, 0x4a, 0x14 },
4989 .ilen = 80,
4990 .result = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
4991 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
4992 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
4993 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
4994 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
4995 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
4996 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
4997 0xba, 0x63, 0x7b, 0x39, 0x1a, 0xaf, 0xd2, 0x55 },
4998 .rlen = 64,
4999 }, {
5000 .key = { 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c,
5001 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08,
5002 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c },
5003 .klen = 24,
5004 .iv = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad,
5005 0xde, 0xca, 0xf8, 0x88 },
5006 .input = { 0x39, 0x80, 0xca, 0x0b, 0x3c, 0x00, 0xe8, 0x41,
5007 0xeb, 0x06, 0xfa, 0xc4, 0x87, 0x2a, 0x27, 0x57,
5008 0x85, 0x9e, 0x1c, 0xea, 0xa6, 0xef, 0xd9, 0x84,
5009 0x62, 0x85, 0x93, 0xb4, 0x0c, 0xa1, 0xe1, 0x9c,
5010 0x7d, 0x77, 0x3d, 0x00, 0xc1, 0x44, 0xc5, 0x25,
5011 0xac, 0x61, 0x9d, 0x18, 0xc8, 0x4a, 0x3f, 0x47,
5012 0x18, 0xe2, 0x44, 0x8b, 0x2f, 0xe3, 0x24, 0xd9,
5013 0xcc, 0xda, 0x27, 0x10,
5014 0x25, 0x19, 0x49, 0x8e, 0x80, 0xf1, 0x47, 0x8f,
5015 0x37, 0xba, 0x55, 0xbd, 0x6d, 0x27, 0x61, 0x8c },
5016 .ilen = 76,
5017 .assoc = { 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
5018 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef,
5019 0xab, 0xad, 0xda, 0xd2 },
5020 .alen = 20,
5021 .result = { 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5,
5022 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a,
5023 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda,
5024 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72,
5025 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53,
5026 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25,
5027 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57,
5028 0xba, 0x63, 0x7b, 0x39 },
5029 .rlen = 60,
5030 }
5031};
5032
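A few of the GCM vectors also carry .np/.tap and .anp/.atap, which describe how the input and the associated data may be handed to the cipher as several scatterlist segments instead of one flat buffer, so that page-crossing code paths get exercised. The fragment below only illustrates what those fields describe; show_segments() is a hypothetical helper of ours, and the real consumer of these fields lives in tcrypt.c.

/* Illustration only: print the byte ranges a .tap[] split would produce. */
#include <stdio.h>

static void show_segments(const char *what, unsigned int total,
			  const unsigned int *tap, unsigned int np)
{
	unsigned int i, off = 0;

	for (i = 0; i < np; i++) {
		printf("%s segment %u: bytes %u-%u\n",
		       what, i, off, off + tap[i] - 1);
		off += tap[i];
	}
	if (off != total)
		printf("%s: .tap entries do not add up to the length\n", what);
}

int main(void)
{
	/* From the chunked decryption vector above: .ilen = 76, .np = 2,
	 * .tap = { 48, 28 }; .alen = 20, .anp = 3, .atap = { 8, 8, 4 }. */
	unsigned int tap[] = { 48, 28 };
	unsigned int atap[] = { 8, 8, 4 };

	show_segments("input", 76, tap, 2);
	show_segments("assoc", 20, atap, 3);
	return 0;
}
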
5033static struct aead_testvec aes_ccm_enc_tv_template[] = {
5034 { /* From RFC 3610 */
5035 .key = { 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
5036 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf },
5037 .klen = 16,
5038 .iv = { 0x01, 0x00, 0x00, 0x00, 0x03, 0x02, 0x01, 0x00,
5039 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0x00, 0x00 },
5040 .assoc = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07 },
5041 .alen = 8,
5042 .input = { 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
5043 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
5044 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e },
5045 .ilen = 23,
5046 .result = { 0x58, 0x8c, 0x97, 0x9a, 0x61, 0xc6, 0x63, 0xd2,
5047 0xf0, 0x66, 0xd0, 0xc2, 0xc0, 0xf9, 0x89, 0x80,
5048 0x6d, 0x5f, 0x6b, 0x61, 0xda, 0xc3, 0x84, 0x17,
5049 0xe8, 0xd1, 0x2c, 0xfd, 0xf9, 0x26, 0xe0 },
5050 .rlen = 31,
5051 }, {
5052 .key = { 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
5053 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf },
5054 .klen = 16,
5055 .iv = { 0x01, 0x00, 0x00, 0x00, 0x07, 0x06, 0x05, 0x04,
5056 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0x00, 0x00 },
5057 .assoc = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
5058 0x08, 0x09, 0x0a, 0x0b },
5059 .alen = 12,
5060 .input = { 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13,
5061 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b,
5062 0x1c, 0x1d, 0x1e, 0x1f },
5063 .ilen = 20,
5064 .result = { 0xdc, 0xf1, 0xfb, 0x7b, 0x5d, 0x9e, 0x23, 0xfb,
5065 0x9d, 0x4e, 0x13, 0x12, 0x53, 0x65, 0x8a, 0xd8,
5066 0x6e, 0xbd, 0xca, 0x3e, 0x51, 0xe8, 0x3f, 0x07,
5067 0x7d, 0x9c, 0x2d, 0x93 },
5068 .rlen = 28,
5069 }, {
5070 .key = { 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
5071 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf },
5072 .klen = 16,
5073 .iv = { 0x01, 0x00, 0x00, 0x00, 0x0b, 0x0a, 0x09, 0x08,
5074 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0x00, 0x00 },
5075 .assoc = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07 },
5076 .alen = 8,
5077 .input = { 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
5078 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
5079 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
5080 0x20 },
5081 .ilen = 25,
5082 .result = { 0x82, 0x53, 0x1a, 0x60, 0xcc, 0x24, 0x94, 0x5a,
5083 0x4b, 0x82, 0x79, 0x18, 0x1a, 0xb5, 0xc8, 0x4d,
5084 0xf2, 0x1c, 0xe7, 0xf9, 0xb7, 0x3f, 0x42, 0xe1,
5085 0x97, 0xea, 0x9c, 0x07, 0xe5, 0x6b, 0x5e, 0xb1,
5086 0x7e, 0x5f, 0x4e },
5087 .rlen = 35,
5088 }, {
5089 .key = { 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
5090 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf },
5091 .klen = 16,
5092 .iv = { 0x01, 0x00, 0x00, 0x00, 0x0c, 0x0b, 0x0a, 0x09,
5093 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0x00, 0x00 },
5094 .assoc = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
5095 0x08, 0x09, 0x0a, 0x0b },
5096 .alen = 12,
5097 .input = { 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13,
5098 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b,
5099 0x1c, 0x1d, 0x1e },
5100 .ilen = 19,
5101 .result = { 0x07, 0x34, 0x25, 0x94, 0x15, 0x77, 0x85, 0x15,
5102 0x2b, 0x07, 0x40, 0x98, 0x33, 0x0a, 0xbb, 0x14,
5103 0x1b, 0x94, 0x7b, 0x56, 0x6a, 0xa9, 0x40, 0x6b,
5104 0x4d, 0x99, 0x99, 0x88, 0xdd },
5105 .rlen = 29,
5106 }, {
5107 .key = { 0xd7, 0x82, 0x8d, 0x13, 0xb2, 0xb0, 0xbd, 0xc3,
5108 0x25, 0xa7, 0x62, 0x36, 0xdf, 0x93, 0xcc, 0x6b },
5109 .klen = 16,
5110 .iv = { 0x01, 0x00, 0x33, 0x56, 0x8e, 0xf7, 0xb2, 0x63,
5111 0x3c, 0x96, 0x96, 0x76, 0x6c, 0xfa, 0x00, 0x00 },
5112 .assoc = { 0x63, 0x01, 0x8f, 0x76, 0xdc, 0x8a, 0x1b, 0xcb },
5113 .alen = 8,
5114 .input = { 0x90, 0x20, 0xea, 0x6f, 0x91, 0xbd, 0xd8, 0x5a,
5115 0xfa, 0x00, 0x39, 0xba, 0x4b, 0xaf, 0xf9, 0xbf,
5116 0xb7, 0x9c, 0x70, 0x28, 0x94, 0x9c, 0xd0, 0xec },
5117 .ilen = 24,
5118 .result = { 0x4c, 0xcb, 0x1e, 0x7c, 0xa9, 0x81, 0xbe, 0xfa,
5119 0xa0, 0x72, 0x6c, 0x55, 0xd3, 0x78, 0x06, 0x12,
5120 0x98, 0xc8, 0x5c, 0x92, 0x81, 0x4a, 0xbc, 0x33,
5121 0xc5, 0x2e, 0xe8, 0x1d, 0x7d, 0x77, 0xc0, 0x8a },
5122 .rlen = 32,
5123 }, {
5124 .key = { 0xd7, 0x82, 0x8d, 0x13, 0xb2, 0xb0, 0xbd, 0xc3,
5125 0x25, 0xa7, 0x62, 0x36, 0xdf, 0x93, 0xcc, 0x6b },
5126 .klen = 16,
5127 .iv = { 0x01, 0x00, 0xd5, 0x60, 0x91, 0x2d, 0x3f, 0x70,
5128 0x3c, 0x96, 0x96, 0x76, 0x6c, 0xfa, 0x00, 0x00 },
5129 .assoc = { 0xcd, 0x90, 0x44, 0xd2, 0xb7, 0x1f, 0xdb, 0x81,
5130 0x20, 0xea, 0x60, 0xc0 },
5131 .alen = 12,
5132 .input = { 0x64, 0x35, 0xac, 0xba, 0xfb, 0x11, 0xa8, 0x2e,
5133 0x2f, 0x07, 0x1d, 0x7c, 0xa4, 0xa5, 0xeb, 0xd9,
5134 0x3a, 0x80, 0x3b, 0xa8, 0x7f },
5135 .ilen = 21,
5136 .result = { 0x00, 0x97, 0x69, 0xec, 0xab, 0xdf, 0x48, 0x62,
5137 0x55, 0x94, 0xc5, 0x92, 0x51, 0xe6, 0x03, 0x57,
5138 0x22, 0x67, 0x5e, 0x04, 0xc8, 0x47, 0x09, 0x9e,
5139 0x5a, 0xe0, 0x70, 0x45, 0x51 },
5140 .rlen = 29,
5141 }, {
5142 .key = { 0xd7, 0x82, 0x8d, 0x13, 0xb2, 0xb0, 0xbd, 0xc3,
5143 0x25, 0xa7, 0x62, 0x36, 0xdf, 0x93, 0xcc, 0x6b },
5144 .klen = 16,
5145 .iv = { 0x01, 0x00, 0x42, 0xff, 0xf8, 0xf1, 0x95, 0x1c,
5146 0x3c, 0x96, 0x96, 0x76, 0x6c, 0xfa, 0x00, 0x00 },
5147 .assoc = { 0xd8, 0x5b, 0xc7, 0xe6, 0x9f, 0x94, 0x4f, 0xb8 },
5148 .alen = 8,
5149 .input = { 0x8a, 0x19, 0xb9, 0x50, 0xbc, 0xf7, 0x1a, 0x01,
5150 0x8e, 0x5e, 0x67, 0x01, 0xc9, 0x17, 0x87, 0x65,
5151 0x98, 0x09, 0xd6, 0x7d, 0xbe, 0xdd, 0x18 },
5152 .ilen = 23,
5153 .result = { 0xbc, 0x21, 0x8d, 0xaa, 0x94, 0x74, 0x27, 0xb6,
5154 0xdb, 0x38, 0x6a, 0x99, 0xac, 0x1a, 0xef, 0x23,
5155 0xad, 0xe0, 0xb5, 0x29, 0x39, 0xcb, 0x6a, 0x63,
5156 0x7c, 0xf9, 0xbe, 0xc2, 0x40, 0x88, 0x97, 0xc6,
5157 0xba },
5158 .rlen = 33,
5159 },
5160};
5161
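The RFC 3610 packet vectors use 13-byte nonces, and the RFC's nonce does not appear verbatim in .iv: for the kernel's "ccm(aes)" template the first IV byte carries L - 1 (the size of the CCM length field minus one), the nonce follows, and the remaining bytes are zeroed counter space. A minimal sketch of that layout follows; ccm_build_iv() is a name of our own choosing, not a kernel API.

/* Hedged sketch of the 16-byte .iv layout used above for "ccm(aes)". */
#include <stdio.h>
#include <string.h>

static void ccm_build_iv(unsigned char iv[16],
			 const unsigned char *nonce, unsigned int nlen)
{
	unsigned int l = 15 - nlen;	/* RFC 3610: nonce is 15 - L octets */

	memset(iv, 0, 16);
	iv[0] = l - 1;			/* 13-byte nonce -> L = 2 -> 0x01   */
	memcpy(iv + 1, nonce, nlen);
}

int main(void)
{
	/* Nonce of the first RFC 3610 vector above. */
	static const unsigned char nonce[13] = {
		0x00, 0x00, 0x00, 0x03, 0x02, 0x01, 0x00,
		0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5
	};
	unsigned char iv[16];
	int i;

	ccm_build_iv(iv, nonce, sizeof(nonce));
	for (i = 0; i < 16; i++)
		printf("%02x ", iv[i]);	/* matches the first .iv above */
	printf("\n");
	return 0;
}
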
5162static struct aead_testvec aes_ccm_dec_tv_template[] = {
5163 { /* From RFC 3610 */
5164 .key = { 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
5165 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf },
5166 .klen = 16,
5167 .iv = { 0x01, 0x00, 0x00, 0x00, 0x03, 0x02, 0x01, 0x00,
5168 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0x00, 0x00 },
5169 .assoc = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07 },
5170 .alen = 8,
5171 .input = { 0x58, 0x8c, 0x97, 0x9a, 0x61, 0xc6, 0x63, 0xd2,
5172 0xf0, 0x66, 0xd0, 0xc2, 0xc0, 0xf9, 0x89, 0x80,
5173 0x6d, 0x5f, 0x6b, 0x61, 0xda, 0xc3, 0x84, 0x17,
5174 0xe8, 0xd1, 0x2c, 0xfd, 0xf9, 0x26, 0xe0 },
5175 .ilen = 31,
5176 .result = { 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
5177 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
5178 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e },
5179 .rlen = 23,
5180 }, {
5181 .key = { 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
5182 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf },
5183 .klen = 16,
5184 .iv = { 0x01, 0x00, 0x00, 0x00, 0x07, 0x06, 0x05, 0x04,
5185 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0x00, 0x00 },
5186 .assoc = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
5187 0x08, 0x09, 0x0a, 0x0b },
5188 .alen = 12,
5189 .input = { 0xdc, 0xf1, 0xfb, 0x7b, 0x5d, 0x9e, 0x23, 0xfb,
5190 0x9d, 0x4e, 0x13, 0x12, 0x53, 0x65, 0x8a, 0xd8,
5191 0x6e, 0xbd, 0xca, 0x3e, 0x51, 0xe8, 0x3f, 0x07,
5192 0x7d, 0x9c, 0x2d, 0x93 },
5193 .ilen = 28,
5194 .result = { 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13,
5195 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b,
5196 0x1c, 0x1d, 0x1e, 0x1f },
5197 .rlen = 20,
5198 }, {
5199 .key = { 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
5200 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf },
5201 .klen = 16,
5202 .iv = { 0x01, 0x00, 0x00, 0x00, 0x0b, 0x0a, 0x09, 0x08,
5203 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0x00, 0x00 },
5204 .assoc = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07 },
5205 .alen = 8,
5206 .input = { 0x82, 0x53, 0x1a, 0x60, 0xcc, 0x24, 0x94, 0x5a,
5207 0x4b, 0x82, 0x79, 0x18, 0x1a, 0xb5, 0xc8, 0x4d,
5208 0xf2, 0x1c, 0xe7, 0xf9, 0xb7, 0x3f, 0x42, 0xe1,
5209 0x97, 0xea, 0x9c, 0x07, 0xe5, 0x6b, 0x5e, 0xb1,
5210 0x7e, 0x5f, 0x4e },
5211 .ilen = 35,
5212 .result = { 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
5213 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
5214 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
5215 0x20 },
5216 .rlen = 25,
5217 }, {
5218 .key = { 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
5219 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf },
5220 .klen = 16,
5221 .iv = { 0x01, 0x00, 0x00, 0x00, 0x0c, 0x0b, 0x0a, 0x09,
5222 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0x00, 0x00 },
5223 .assoc = { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
5224 0x08, 0x09, 0x0a, 0x0b },
5225 .alen = 12,
5226 .input = { 0x07, 0x34, 0x25, 0x94, 0x15, 0x77, 0x85, 0x15,
5227 0x2b, 0x07, 0x40, 0x98, 0x33, 0x0a, 0xbb, 0x14,
5228 0x1b, 0x94, 0x7b, 0x56, 0x6a, 0xa9, 0x40, 0x6b,
5229 0x4d, 0x99, 0x99, 0x88, 0xdd },
5230 .ilen = 29,
5231 .result = { 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13,
5232 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b,
5233 0x1c, 0x1d, 0x1e },
5234 .rlen = 19,
5235 }, {
5236 .key = { 0xd7, 0x82, 0x8d, 0x13, 0xb2, 0xb0, 0xbd, 0xc3,
5237 0x25, 0xa7, 0x62, 0x36, 0xdf, 0x93, 0xcc, 0x6b },
5238 .klen = 16,
5239 .iv = { 0x01, 0x00, 0x33, 0x56, 0x8e, 0xf7, 0xb2, 0x63,
5240 0x3c, 0x96, 0x96, 0x76, 0x6c, 0xfa, 0x00, 0x00 },
5241 .assoc = { 0x63, 0x01, 0x8f, 0x76, 0xdc, 0x8a, 0x1b, 0xcb },
5242 .alen = 8,
5243 .input = { 0x4c, 0xcb, 0x1e, 0x7c, 0xa9, 0x81, 0xbe, 0xfa,
5244 0xa0, 0x72, 0x6c, 0x55, 0xd3, 0x78, 0x06, 0x12,
5245 0x98, 0xc8, 0x5c, 0x92, 0x81, 0x4a, 0xbc, 0x33,
5246 0xc5, 0x2e, 0xe8, 0x1d, 0x7d, 0x77, 0xc0, 0x8a },
5247 .ilen = 32,
5248 .result = { 0x90, 0x20, 0xea, 0x6f, 0x91, 0xbd, 0xd8, 0x5a,
5249 0xfa, 0x00, 0x39, 0xba, 0x4b, 0xaf, 0xf9, 0xbf,
5250 0xb7, 0x9c, 0x70, 0x28, 0x94, 0x9c, 0xd0, 0xec },
5251 .rlen = 24,
5252 }, {
5253 .key = { 0xd7, 0x82, 0x8d, 0x13, 0xb2, 0xb0, 0xbd, 0xc3,
5254 0x25, 0xa7, 0x62, 0x36, 0xdf, 0x93, 0xcc, 0x6b },
5255 .klen = 16,
5256 .iv = { 0x01, 0x00, 0xd5, 0x60, 0x91, 0x2d, 0x3f, 0x70,
5257 0x3c, 0x96, 0x96, 0x76, 0x6c, 0xfa, 0x00, 0x00 },
5258 .assoc = { 0xcd, 0x90, 0x44, 0xd2, 0xb7, 0x1f, 0xdb, 0x81,
5259 0x20, 0xea, 0x60, 0xc0 },
5260 .alen = 12,
5261 .input = { 0x00, 0x97, 0x69, 0xec, 0xab, 0xdf, 0x48, 0x62,
5262 0x55, 0x94, 0xc5, 0x92, 0x51, 0xe6, 0x03, 0x57,
5263 0x22, 0x67, 0x5e, 0x04, 0xc8, 0x47, 0x09, 0x9e,
5264 0x5a, 0xe0, 0x70, 0x45, 0x51 },
5265 .ilen = 29,
5266 .result = { 0x64, 0x35, 0xac, 0xba, 0xfb, 0x11, 0xa8, 0x2e,
5267 0x2f, 0x07, 0x1d, 0x7c, 0xa4, 0xa5, 0xeb, 0xd9,
5268 0x3a, 0x80, 0x3b, 0xa8, 0x7f },
5269 .rlen = 21,
5270 }, {
5271 .key = { 0xd7, 0x82, 0x8d, 0x13, 0xb2, 0xb0, 0xbd, 0xc3,
5272 0x25, 0xa7, 0x62, 0x36, 0xdf, 0x93, 0xcc, 0x6b },
5273 .klen = 16,
5274 .iv = { 0x01, 0x00, 0x42, 0xff, 0xf8, 0xf1, 0x95, 0x1c,
5275 0x3c, 0x96, 0x96, 0x76, 0x6c, 0xfa, 0x00, 0x00 },
5276 .assoc = { 0xd8, 0x5b, 0xc7, 0xe6, 0x9f, 0x94, 0x4f, 0xb8 },
5277 .alen = 8,
5278 .input = { 0xbc, 0x21, 0x8d, 0xaa, 0x94, 0x74, 0x27, 0xb6,
5279 0xdb, 0x38, 0x6a, 0x99, 0xac, 0x1a, 0xef, 0x23,
5280 0xad, 0xe0, 0xb5, 0x29, 0x39, 0xcb, 0x6a, 0x63,
5281 0x7c, 0xf9, 0xbe, 0xc2, 0x40, 0x88, 0x97, 0xc6,
5282 0xba },
5283 .ilen = 33,
5284 .result = { 0x8a, 0x19, 0xb9, 0x50, 0xbc, 0xf7, 0x1a, 0x01,
5285 0x8e, 0x5e, 0x67, 0x01, 0xc9, 0x17, 0x87, 0x65,
5286 0x98, 0x09, 0xd6, 0x7d, 0xbe, 0xdd, 0x18 },
5287 .rlen = 23,
5288 },
5289};
5290
3183 5291	/* Cast5 test vectors from RFC 2144 */
3184 5292	#define CAST5_ENC_TEST_VECTORS	3
3185 5293	#define CAST5_DEC_TEST_VECTORS	3
@@ -4317,6 +6425,1211 @@ static struct cipher_testvec seed_dec_tv_template[] = {
4317 6425	}
4318 6426	};
4319 6427
6428#define SALSA20_STREAM_ENC_TEST_VECTORS 5
6429static struct cipher_testvec salsa20_stream_enc_tv_template[] = {
6430 /*
6431	 * Test vectors from verified.test-vectors submitted to ECRYPT.
6432	 * They are truncated to sizes 39, 64, 111 and 129 to test a
6433	 * variety of input lengths.
6434 */
6435 { /* Set 3, vector 0 */
6436 .key = {
6437 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
6438 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F
6439 },
6440 .klen = 16,
6441 .iv = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
6442 .input = {
6443 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6444 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6445 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6446 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6447 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6448 },
6449 .ilen = 39,
6450 .result = {
6451 0x2D, 0xD5, 0xC3, 0xF7, 0xBA, 0x2B, 0x20, 0xF7,
6452 0x68, 0x02, 0x41, 0x0C, 0x68, 0x86, 0x88, 0x89,
6453 0x5A, 0xD8, 0xC1, 0xBD, 0x4E, 0xA6, 0xC9, 0xB1,
6454 0x40, 0xFB, 0x9B, 0x90, 0xE2, 0x10, 0x49, 0xBF,
6455 0x58, 0x3F, 0x52, 0x79, 0x70, 0xEB, 0xC1,
6456 },
6457 .rlen = 39,
6458 }, { /* Set 5, vector 0 */
6459 .key = {
6460 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6461 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
6462 },
6463 .klen = 16,
6464 .iv = { 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
6465 .input = {
6466 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6467 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6468 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6469 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6470 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6471 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6472 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6473 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6474 },
6475 .ilen = 64,
6476 .result = {
6477 0xB6, 0x6C, 0x1E, 0x44, 0x46, 0xDD, 0x95, 0x57,
6478 0xE5, 0x78, 0xE2, 0x23, 0xB0, 0xB7, 0x68, 0x01,
6479 0x7B, 0x23, 0xB2, 0x67, 0xBB, 0x02, 0x34, 0xAE,
6480 0x46, 0x26, 0xBF, 0x44, 0x3F, 0x21, 0x97, 0x76,
6481 0x43, 0x6F, 0xB1, 0x9F, 0xD0, 0xE8, 0x86, 0x6F,
6482 0xCD, 0x0D, 0xE9, 0xA9, 0x53, 0x8F, 0x4A, 0x09,
6483 0xCA, 0x9A, 0xC0, 0x73, 0x2E, 0x30, 0xBC, 0xF9,
6484 0x8E, 0x4F, 0x13, 0xE4, 0xB9, 0xE2, 0x01, 0xD9,
6485 },
6486 .rlen = 64,
6487 }, { /* Set 3, vector 27 */
6488 .key = {
6489 0x1B, 0x1C, 0x1D, 0x1E, 0x1F, 0x20, 0x21, 0x22,
6490 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2A,
6491 0x2B, 0x2C, 0x2D, 0x2E, 0x2F, 0x30, 0x31, 0x32,
6492 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3A
6493 },
6494 .klen = 32,
6495 .iv = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
6496 .input = {
6497 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6498 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6499 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6500 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6501 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6502 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6503 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6504 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6505
6506 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6507 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6508 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6509 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6510 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6511 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6512 },
6513 .ilen = 111,
6514 .result = {
6515 0xAE, 0x39, 0x50, 0x8E, 0xAC, 0x9A, 0xEC, 0xE7,
6516 0xBF, 0x97, 0xBB, 0x20, 0xB9, 0xDE, 0xE4, 0x1F,
6517 0x87, 0xD9, 0x47, 0xF8, 0x28, 0x91, 0x35, 0x98,
6518 0xDB, 0x72, 0xCC, 0x23, 0x29, 0x48, 0x56, 0x5E,
6519 0x83, 0x7E, 0x0B, 0xF3, 0x7D, 0x5D, 0x38, 0x7B,
6520 0x2D, 0x71, 0x02, 0xB4, 0x3B, 0xB5, 0xD8, 0x23,
6521 0xB0, 0x4A, 0xDF, 0x3C, 0xEC, 0xB6, 0xD9, 0x3B,
6522 0x9B, 0xA7, 0x52, 0xBE, 0xC5, 0xD4, 0x50, 0x59,
6523
6524 0x15, 0x14, 0xB4, 0x0E, 0x40, 0xE6, 0x53, 0xD1,
6525 0x83, 0x9C, 0x5B, 0xA0, 0x92, 0x29, 0x6B, 0x5E,
6526 0x96, 0x5B, 0x1E, 0x2F, 0xD3, 0xAC, 0xC1, 0x92,
6527 0xB1, 0x41, 0x3F, 0x19, 0x2F, 0xC4, 0x3B, 0xC6,
6528 0x95, 0x46, 0x45, 0x54, 0xE9, 0x75, 0x03, 0x08,
6529 0x44, 0xAF, 0xE5, 0x8A, 0x81, 0x12, 0x09,
6530 },
6531 .rlen = 111,
6532
6533 }, { /* Set 5, vector 27 */
6534 .key = {
6535 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6536 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6537 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6538 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
6539 },
6540 .klen = 32,
6541 .iv = { 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00 },
6542 .input = {
6543 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6544 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6545 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6546 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6547 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6548 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6549 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6550 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6551
6552 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6553 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6554 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6555 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6556 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6557 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6558 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6559 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6560
6561 0x00,
6562 },
6563 .ilen = 129,
6564 .result = {
6565 0xD2, 0xDB, 0x1A, 0x5C, 0xF1, 0xC1, 0xAC, 0xDB,
6566 0xE8, 0x1A, 0x7A, 0x43, 0x40, 0xEF, 0x53, 0x43,
6567 0x5E, 0x7F, 0x4B, 0x1A, 0x50, 0x52, 0x3F, 0x8D,
6568 0x28, 0x3D, 0xCF, 0x85, 0x1D, 0x69, 0x6E, 0x60,
6569 0xF2, 0xDE, 0x74, 0x56, 0x18, 0x1B, 0x84, 0x10,
6570 0xD4, 0x62, 0xBA, 0x60, 0x50, 0xF0, 0x61, 0xF2,
6571 0x1C, 0x78, 0x7F, 0xC1, 0x24, 0x34, 0xAF, 0x58,
6572 0xBF, 0x2C, 0x59, 0xCA, 0x90, 0x77, 0xF3, 0xB0,
6573
6574 0x5B, 0x4A, 0xDF, 0x89, 0xCE, 0x2C, 0x2F, 0xFC,
6575 0x67, 0xF0, 0xE3, 0x45, 0xE8, 0xB3, 0xB3, 0x75,
6576 0xA0, 0x95, 0x71, 0xA1, 0x29, 0x39, 0x94, 0xCA,
6577 0x45, 0x2F, 0xBD, 0xCB, 0x10, 0xB6, 0xBE, 0x9F,
6578 0x8E, 0xF9, 0xB2, 0x01, 0x0A, 0x5A, 0x0A, 0xB7,
6579 0x6B, 0x9D, 0x70, 0x8E, 0x4B, 0xD6, 0x2F, 0xCD,
6580 0x2E, 0x40, 0x48, 0x75, 0xE9, 0xE2, 0x21, 0x45,
6581 0x0B, 0xC9, 0xB6, 0xB5, 0x66, 0xBC, 0x9A, 0x59,
6582
6583 0x5A,
6584 },
6585 .rlen = 129,
6586 }, { /* large test vector generated using Crypto++ */
6587 .key = {
6588 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
6589 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
6590 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
6591 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
6592 },
6593 .klen = 32,
6594 .iv = {
6595 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6596 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
6597 },
6598 .input = {
6599 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
6600 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
6601 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
6602 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
6603 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27,
6604 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f,
6605 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37,
6606 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f,
6607 0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47,
6608 0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f,
6609 0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57,
6610 0x58, 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f,
6611 0x60, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67,
6612 0x68, 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f,
6613 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77,
6614 0x78, 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e, 0x7f,
6615 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
6616 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
6617 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
6618 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,
6619 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
6620 0xa8, 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf,
6621 0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7,
6622 0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf,
6623 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
6624 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf,
6625 0xd0, 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7,
6626 0xd8, 0xd9, 0xda, 0xdb, 0xdc, 0xdd, 0xde, 0xdf,
6627 0xe0, 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7,
6628 0xe8, 0xe9, 0xea, 0xeb, 0xec, 0xed, 0xee, 0xef,
6629 0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7,
6630 0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe, 0xff,
6631 0x00, 0x03, 0x06, 0x09, 0x0c, 0x0f, 0x12, 0x15,
6632 0x18, 0x1b, 0x1e, 0x21, 0x24, 0x27, 0x2a, 0x2d,
6633 0x30, 0x33, 0x36, 0x39, 0x3c, 0x3f, 0x42, 0x45,
6634 0x48, 0x4b, 0x4e, 0x51, 0x54, 0x57, 0x5a, 0x5d,
6635 0x60, 0x63, 0x66, 0x69, 0x6c, 0x6f, 0x72, 0x75,
6636 0x78, 0x7b, 0x7e, 0x81, 0x84, 0x87, 0x8a, 0x8d,
6637 0x90, 0x93, 0x96, 0x99, 0x9c, 0x9f, 0xa2, 0xa5,
6638 0xa8, 0xab, 0xae, 0xb1, 0xb4, 0xb7, 0xba, 0xbd,
6639 0xc0, 0xc3, 0xc6, 0xc9, 0xcc, 0xcf, 0xd2, 0xd5,
6640 0xd8, 0xdb, 0xde, 0xe1, 0xe4, 0xe7, 0xea, 0xed,
6641 0xf0, 0xf3, 0xf6, 0xf9, 0xfc, 0xff, 0x02, 0x05,
6642 0x08, 0x0b, 0x0e, 0x11, 0x14, 0x17, 0x1a, 0x1d,
6643 0x20, 0x23, 0x26, 0x29, 0x2c, 0x2f, 0x32, 0x35,
6644 0x38, 0x3b, 0x3e, 0x41, 0x44, 0x47, 0x4a, 0x4d,
6645 0x50, 0x53, 0x56, 0x59, 0x5c, 0x5f, 0x62, 0x65,
6646 0x68, 0x6b, 0x6e, 0x71, 0x74, 0x77, 0x7a, 0x7d,
6647 0x80, 0x83, 0x86, 0x89, 0x8c, 0x8f, 0x92, 0x95,
6648 0x98, 0x9b, 0x9e, 0xa1, 0xa4, 0xa7, 0xaa, 0xad,
6649 0xb0, 0xb3, 0xb6, 0xb9, 0xbc, 0xbf, 0xc2, 0xc5,
6650 0xc8, 0xcb, 0xce, 0xd1, 0xd4, 0xd7, 0xda, 0xdd,
6651 0xe0, 0xe3, 0xe6, 0xe9, 0xec, 0xef, 0xf2, 0xf5,
6652 0xf8, 0xfb, 0xfe, 0x01, 0x04, 0x07, 0x0a, 0x0d,
6653 0x10, 0x13, 0x16, 0x19, 0x1c, 0x1f, 0x22, 0x25,
6654 0x28, 0x2b, 0x2e, 0x31, 0x34, 0x37, 0x3a, 0x3d,
6655 0x40, 0x43, 0x46, 0x49, 0x4c, 0x4f, 0x52, 0x55,
6656 0x58, 0x5b, 0x5e, 0x61, 0x64, 0x67, 0x6a, 0x6d,
6657 0x70, 0x73, 0x76, 0x79, 0x7c, 0x7f, 0x82, 0x85,
6658 0x88, 0x8b, 0x8e, 0x91, 0x94, 0x97, 0x9a, 0x9d,
6659 0xa0, 0xa3, 0xa6, 0xa9, 0xac, 0xaf, 0xb2, 0xb5,
6660 0xb8, 0xbb, 0xbe, 0xc1, 0xc4, 0xc7, 0xca, 0xcd,
6661 0xd0, 0xd3, 0xd6, 0xd9, 0xdc, 0xdf, 0xe2, 0xe5,
6662 0xe8, 0xeb, 0xee, 0xf1, 0xf4, 0xf7, 0xfa, 0xfd,
6663 0x00, 0x05, 0x0a, 0x0f, 0x14, 0x19, 0x1e, 0x23,
6664 0x28, 0x2d, 0x32, 0x37, 0x3c, 0x41, 0x46, 0x4b,
6665 0x50, 0x55, 0x5a, 0x5f, 0x64, 0x69, 0x6e, 0x73,
6666 0x78, 0x7d, 0x82, 0x87, 0x8c, 0x91, 0x96, 0x9b,
6667 0xa0, 0xa5, 0xaa, 0xaf, 0xb4, 0xb9, 0xbe, 0xc3,
6668 0xc8, 0xcd, 0xd2, 0xd7, 0xdc, 0xe1, 0xe6, 0xeb,
6669 0xf0, 0xf5, 0xfa, 0xff, 0x04, 0x09, 0x0e, 0x13,
6670 0x18, 0x1d, 0x22, 0x27, 0x2c, 0x31, 0x36, 0x3b,
6671 0x40, 0x45, 0x4a, 0x4f, 0x54, 0x59, 0x5e, 0x63,
6672 0x68, 0x6d, 0x72, 0x77, 0x7c, 0x81, 0x86, 0x8b,
6673 0x90, 0x95, 0x9a, 0x9f, 0xa4, 0xa9, 0xae, 0xb3,
6674 0xb8, 0xbd, 0xc2, 0xc7, 0xcc, 0xd1, 0xd6, 0xdb,
6675 0xe0, 0xe5, 0xea, 0xef, 0xf4, 0xf9, 0xfe, 0x03,
6676 0x08, 0x0d, 0x12, 0x17, 0x1c, 0x21, 0x26, 0x2b,
6677 0x30, 0x35, 0x3a, 0x3f, 0x44, 0x49, 0x4e, 0x53,
6678 0x58, 0x5d, 0x62, 0x67, 0x6c, 0x71, 0x76, 0x7b,
6679 0x80, 0x85, 0x8a, 0x8f, 0x94, 0x99, 0x9e, 0xa3,
6680 0xa8, 0xad, 0xb2, 0xb7, 0xbc, 0xc1, 0xc6, 0xcb,
6681 0xd0, 0xd5, 0xda, 0xdf, 0xe4, 0xe9, 0xee, 0xf3,
6682 0xf8, 0xfd, 0x02, 0x07, 0x0c, 0x11, 0x16, 0x1b,
6683 0x20, 0x25, 0x2a, 0x2f, 0x34, 0x39, 0x3e, 0x43,
6684 0x48, 0x4d, 0x52, 0x57, 0x5c, 0x61, 0x66, 0x6b,
6685 0x70, 0x75, 0x7a, 0x7f, 0x84, 0x89, 0x8e, 0x93,
6686 0x98, 0x9d, 0xa2, 0xa7, 0xac, 0xb1, 0xb6, 0xbb,
6687 0xc0, 0xc5, 0xca, 0xcf, 0xd4, 0xd9, 0xde, 0xe3,
6688 0xe8, 0xed, 0xf2, 0xf7, 0xfc, 0x01, 0x06, 0x0b,
6689 0x10, 0x15, 0x1a, 0x1f, 0x24, 0x29, 0x2e, 0x33,
6690 0x38, 0x3d, 0x42, 0x47, 0x4c, 0x51, 0x56, 0x5b,
6691 0x60, 0x65, 0x6a, 0x6f, 0x74, 0x79, 0x7e, 0x83,
6692 0x88, 0x8d, 0x92, 0x97, 0x9c, 0xa1, 0xa6, 0xab,
6693 0xb0, 0xb5, 0xba, 0xbf, 0xc4, 0xc9, 0xce, 0xd3,
6694 0xd8, 0xdd, 0xe2, 0xe7, 0xec, 0xf1, 0xf6, 0xfb,
6695 0x00, 0x07, 0x0e, 0x15, 0x1c, 0x23, 0x2a, 0x31,
6696 0x38, 0x3f, 0x46, 0x4d, 0x54, 0x5b, 0x62, 0x69,
6697 0x70, 0x77, 0x7e, 0x85, 0x8c, 0x93, 0x9a, 0xa1,
6698 0xa8, 0xaf, 0xb6, 0xbd, 0xc4, 0xcb, 0xd2, 0xd9,
6699 0xe0, 0xe7, 0xee, 0xf5, 0xfc, 0x03, 0x0a, 0x11,
6700 0x18, 0x1f, 0x26, 0x2d, 0x34, 0x3b, 0x42, 0x49,
6701 0x50, 0x57, 0x5e, 0x65, 0x6c, 0x73, 0x7a, 0x81,
6702 0x88, 0x8f, 0x96, 0x9d, 0xa4, 0xab, 0xb2, 0xb9,
6703 0xc0, 0xc7, 0xce, 0xd5, 0xdc, 0xe3, 0xea, 0xf1,
6704 0xf8, 0xff, 0x06, 0x0d, 0x14, 0x1b, 0x22, 0x29,
6705 0x30, 0x37, 0x3e, 0x45, 0x4c, 0x53, 0x5a, 0x61,
6706 0x68, 0x6f, 0x76, 0x7d, 0x84, 0x8b, 0x92, 0x99,
6707 0xa0, 0xa7, 0xae, 0xb5, 0xbc, 0xc3, 0xca, 0xd1,
6708 0xd8, 0xdf, 0xe6, 0xed, 0xf4, 0xfb, 0x02, 0x09,
6709 0x10, 0x17, 0x1e, 0x25, 0x2c, 0x33, 0x3a, 0x41,
6710 0x48, 0x4f, 0x56, 0x5d, 0x64, 0x6b, 0x72, 0x79,
6711 0x80, 0x87, 0x8e, 0x95, 0x9c, 0xa3, 0xaa, 0xb1,
6712 0xb8, 0xbf, 0xc6, 0xcd, 0xd4, 0xdb, 0xe2, 0xe9,
6713 0xf0, 0xf7, 0xfe, 0x05, 0x0c, 0x13, 0x1a, 0x21,
6714 0x28, 0x2f, 0x36, 0x3d, 0x44, 0x4b, 0x52, 0x59,
6715 0x60, 0x67, 0x6e, 0x75, 0x7c, 0x83, 0x8a, 0x91,
6716 0x98, 0x9f, 0xa6, 0xad, 0xb4, 0xbb, 0xc2, 0xc9,
6717 0xd0, 0xd7, 0xde, 0xe5, 0xec, 0xf3, 0xfa, 0x01,
6718 0x08, 0x0f, 0x16, 0x1d, 0x24, 0x2b, 0x32, 0x39,
6719 0x40, 0x47, 0x4e, 0x55, 0x5c, 0x63, 0x6a, 0x71,
6720 0x78, 0x7f, 0x86, 0x8d, 0x94, 0x9b, 0xa2, 0xa9,
6721 0xb0, 0xb7, 0xbe, 0xc5, 0xcc, 0xd3, 0xda, 0xe1,
6722 0xe8, 0xef, 0xf6, 0xfd, 0x04, 0x0b, 0x12, 0x19,
6723 0x20, 0x27, 0x2e, 0x35, 0x3c, 0x43, 0x4a, 0x51,
6724 0x58, 0x5f, 0x66, 0x6d, 0x74, 0x7b, 0x82, 0x89,
6725 0x90, 0x97, 0x9e, 0xa5, 0xac, 0xb3, 0xba, 0xc1,
6726 0xc8, 0xcf, 0xd6, 0xdd, 0xe4, 0xeb, 0xf2, 0xf9,
6727 0x00, 0x09, 0x12, 0x1b, 0x24, 0x2d, 0x36, 0x3f,
6728 0x48, 0x51, 0x5a, 0x63, 0x6c, 0x75, 0x7e, 0x87,
6729 0x90, 0x99, 0xa2, 0xab, 0xb4, 0xbd, 0xc6, 0xcf,
6730 0xd8, 0xe1, 0xea, 0xf3, 0xfc, 0x05, 0x0e, 0x17,
6731 0x20, 0x29, 0x32, 0x3b, 0x44, 0x4d, 0x56, 0x5f,
6732 0x68, 0x71, 0x7a, 0x83, 0x8c, 0x95, 0x9e, 0xa7,
6733 0xb0, 0xb9, 0xc2, 0xcb, 0xd4, 0xdd, 0xe6, 0xef,
6734 0xf8, 0x01, 0x0a, 0x13, 0x1c, 0x25, 0x2e, 0x37,
6735 0x40, 0x49, 0x52, 0x5b, 0x64, 0x6d, 0x76, 0x7f,
6736 0x88, 0x91, 0x9a, 0xa3, 0xac, 0xb5, 0xbe, 0xc7,
6737 0xd0, 0xd9, 0xe2, 0xeb, 0xf4, 0xfd, 0x06, 0x0f,
6738 0x18, 0x21, 0x2a, 0x33, 0x3c, 0x45, 0x4e, 0x57,
6739 0x60, 0x69, 0x72, 0x7b, 0x84, 0x8d, 0x96, 0x9f,
6740 0xa8, 0xb1, 0xba, 0xc3, 0xcc, 0xd5, 0xde, 0xe7,
6741 0xf0, 0xf9, 0x02, 0x0b, 0x14, 0x1d, 0x26, 0x2f,
6742 0x38, 0x41, 0x4a, 0x53, 0x5c, 0x65, 0x6e, 0x77,
6743 0x80, 0x89, 0x92, 0x9b, 0xa4, 0xad, 0xb6, 0xbf,
6744 0xc8, 0xd1, 0xda, 0xe3, 0xec, 0xf5, 0xfe, 0x07,
6745 0x10, 0x19, 0x22, 0x2b, 0x34, 0x3d, 0x46, 0x4f,
6746 0x58, 0x61, 0x6a, 0x73, 0x7c, 0x85, 0x8e, 0x97,
6747 0xa0, 0xa9, 0xb2, 0xbb, 0xc4, 0xcd, 0xd6, 0xdf,
6748 0xe8, 0xf1, 0xfa, 0x03, 0x0c, 0x15, 0x1e, 0x27,
6749 0x30, 0x39, 0x42, 0x4b, 0x54, 0x5d, 0x66, 0x6f,
6750 0x78, 0x81, 0x8a, 0x93, 0x9c, 0xa5, 0xae, 0xb7,
6751 0xc0, 0xc9, 0xd2, 0xdb, 0xe4, 0xed, 0xf6, 0xff,
6752 0x08, 0x11, 0x1a, 0x23, 0x2c, 0x35, 0x3e, 0x47,
6753 0x50, 0x59, 0x62, 0x6b, 0x74, 0x7d, 0x86, 0x8f,
6754 0x98, 0xa1, 0xaa, 0xb3, 0xbc, 0xc5, 0xce, 0xd7,
6755 0xe0, 0xe9, 0xf2, 0xfb, 0x04, 0x0d, 0x16, 0x1f,
6756 0x28, 0x31, 0x3a, 0x43, 0x4c, 0x55, 0x5e, 0x67,
6757 0x70, 0x79, 0x82, 0x8b, 0x94, 0x9d, 0xa6, 0xaf,
6758 0xb8, 0xc1, 0xca, 0xd3, 0xdc, 0xe5, 0xee, 0xf7,
6759 0x00, 0x0b, 0x16, 0x21, 0x2c, 0x37, 0x42, 0x4d,
6760 0x58, 0x63, 0x6e, 0x79, 0x84, 0x8f, 0x9a, 0xa5,
6761 0xb0, 0xbb, 0xc6, 0xd1, 0xdc, 0xe7, 0xf2, 0xfd,
6762 0x08, 0x13, 0x1e, 0x29, 0x34, 0x3f, 0x4a, 0x55,
6763 0x60, 0x6b, 0x76, 0x81, 0x8c, 0x97, 0xa2, 0xad,
6764 0xb8, 0xc3, 0xce, 0xd9, 0xe4, 0xef, 0xfa, 0x05,
6765 0x10, 0x1b, 0x26, 0x31, 0x3c, 0x47, 0x52, 0x5d,
6766 0x68, 0x73, 0x7e, 0x89, 0x94, 0x9f, 0xaa, 0xb5,
6767 0xc0, 0xcb, 0xd6, 0xe1, 0xec, 0xf7, 0x02, 0x0d,
6768 0x18, 0x23, 0x2e, 0x39, 0x44, 0x4f, 0x5a, 0x65,
6769 0x70, 0x7b, 0x86, 0x91, 0x9c, 0xa7, 0xb2, 0xbd,
6770 0xc8, 0xd3, 0xde, 0xe9, 0xf4, 0xff, 0x0a, 0x15,
6771 0x20, 0x2b, 0x36, 0x41, 0x4c, 0x57, 0x62, 0x6d,
6772 0x78, 0x83, 0x8e, 0x99, 0xa4, 0xaf, 0xba, 0xc5,
6773 0xd0, 0xdb, 0xe6, 0xf1, 0xfc, 0x07, 0x12, 0x1d,
6774 0x28, 0x33, 0x3e, 0x49, 0x54, 0x5f, 0x6a, 0x75,
6775 0x80, 0x8b, 0x96, 0xa1, 0xac, 0xb7, 0xc2, 0xcd,
6776 0xd8, 0xe3, 0xee, 0xf9, 0x04, 0x0f, 0x1a, 0x25,
6777 0x30, 0x3b, 0x46, 0x51, 0x5c, 0x67, 0x72, 0x7d,
6778 0x88, 0x93, 0x9e, 0xa9, 0xb4, 0xbf, 0xca, 0xd5,
6779 0xe0, 0xeb, 0xf6, 0x01, 0x0c, 0x17, 0x22, 0x2d,
6780 0x38, 0x43, 0x4e, 0x59, 0x64, 0x6f, 0x7a, 0x85,
6781 0x90, 0x9b, 0xa6, 0xb1, 0xbc, 0xc7, 0xd2, 0xdd,
6782 0xe8, 0xf3, 0xfe, 0x09, 0x14, 0x1f, 0x2a, 0x35,
6783 0x40, 0x4b, 0x56, 0x61, 0x6c, 0x77, 0x82, 0x8d,
6784 0x98, 0xa3, 0xae, 0xb9, 0xc4, 0xcf, 0xda, 0xe5,
6785 0xf0, 0xfb, 0x06, 0x11, 0x1c, 0x27, 0x32, 0x3d,
6786 0x48, 0x53, 0x5e, 0x69, 0x74, 0x7f, 0x8a, 0x95,
6787 0xa0, 0xab, 0xb6, 0xc1, 0xcc, 0xd7, 0xe2, 0xed,
6788 0xf8, 0x03, 0x0e, 0x19, 0x24, 0x2f, 0x3a, 0x45,
6789 0x50, 0x5b, 0x66, 0x71, 0x7c, 0x87, 0x92, 0x9d,
6790 0xa8, 0xb3, 0xbe, 0xc9, 0xd4, 0xdf, 0xea, 0xf5,
6791 0x00, 0x0d, 0x1a, 0x27, 0x34, 0x41, 0x4e, 0x5b,
6792 0x68, 0x75, 0x82, 0x8f, 0x9c, 0xa9, 0xb6, 0xc3,
6793 0xd0, 0xdd, 0xea, 0xf7, 0x04, 0x11, 0x1e, 0x2b,
6794 0x38, 0x45, 0x52, 0x5f, 0x6c, 0x79, 0x86, 0x93,
6795 0xa0, 0xad, 0xba, 0xc7, 0xd4, 0xe1, 0xee, 0xfb,
6796 0x08, 0x15, 0x22, 0x2f, 0x3c, 0x49, 0x56, 0x63,
6797 0x70, 0x7d, 0x8a, 0x97, 0xa4, 0xb1, 0xbe, 0xcb,
6798 0xd8, 0xe5, 0xf2, 0xff, 0x0c, 0x19, 0x26, 0x33,
6799 0x40, 0x4d, 0x5a, 0x67, 0x74, 0x81, 0x8e, 0x9b,
6800 0xa8, 0xb5, 0xc2, 0xcf, 0xdc, 0xe9, 0xf6, 0x03,
6801 0x10, 0x1d, 0x2a, 0x37, 0x44, 0x51, 0x5e, 0x6b,
6802 0x78, 0x85, 0x92, 0x9f, 0xac, 0xb9, 0xc6, 0xd3,
6803 0xe0, 0xed, 0xfa, 0x07, 0x14, 0x21, 0x2e, 0x3b,
6804 0x48, 0x55, 0x62, 0x6f, 0x7c, 0x89, 0x96, 0xa3,
6805 0xb0, 0xbd, 0xca, 0xd7, 0xe4, 0xf1, 0xfe, 0x0b,
6806 0x18, 0x25, 0x32, 0x3f, 0x4c, 0x59, 0x66, 0x73,
6807 0x80, 0x8d, 0x9a, 0xa7, 0xb4, 0xc1, 0xce, 0xdb,
6808 0xe8, 0xf5, 0x02, 0x0f, 0x1c, 0x29, 0x36, 0x43,
6809 0x50, 0x5d, 0x6a, 0x77, 0x84, 0x91, 0x9e, 0xab,
6810 0xb8, 0xc5, 0xd2, 0xdf, 0xec, 0xf9, 0x06, 0x13,
6811 0x20, 0x2d, 0x3a, 0x47, 0x54, 0x61, 0x6e, 0x7b,
6812 0x88, 0x95, 0xa2, 0xaf, 0xbc, 0xc9, 0xd6, 0xe3,
6813 0xf0, 0xfd, 0x0a, 0x17, 0x24, 0x31, 0x3e, 0x4b,
6814 0x58, 0x65, 0x72, 0x7f, 0x8c, 0x99, 0xa6, 0xb3,
6815 0xc0, 0xcd, 0xda, 0xe7, 0xf4, 0x01, 0x0e, 0x1b,
6816 0x28, 0x35, 0x42, 0x4f, 0x5c, 0x69, 0x76, 0x83,
6817 0x90, 0x9d, 0xaa, 0xb7, 0xc4, 0xd1, 0xde, 0xeb,
6818 0xf8, 0x05, 0x12, 0x1f, 0x2c, 0x39, 0x46, 0x53,
6819 0x60, 0x6d, 0x7a, 0x87, 0x94, 0xa1, 0xae, 0xbb,
6820 0xc8, 0xd5, 0xe2, 0xef, 0xfc, 0x09, 0x16, 0x23,
6821 0x30, 0x3d, 0x4a, 0x57, 0x64, 0x71, 0x7e, 0x8b,
6822 0x98, 0xa5, 0xb2, 0xbf, 0xcc, 0xd9, 0xe6, 0xf3,
6823 0x00, 0x0f, 0x1e, 0x2d, 0x3c, 0x4b, 0x5a, 0x69,
6824 0x78, 0x87, 0x96, 0xa5, 0xb4, 0xc3, 0xd2, 0xe1,
6825 0xf0, 0xff, 0x0e, 0x1d, 0x2c, 0x3b, 0x4a, 0x59,
6826 0x68, 0x77, 0x86, 0x95, 0xa4, 0xb3, 0xc2, 0xd1,
6827 0xe0, 0xef, 0xfe, 0x0d, 0x1c, 0x2b, 0x3a, 0x49,
6828 0x58, 0x67, 0x76, 0x85, 0x94, 0xa3, 0xb2, 0xc1,
6829 0xd0, 0xdf, 0xee, 0xfd, 0x0c, 0x1b, 0x2a, 0x39,
6830 0x48, 0x57, 0x66, 0x75, 0x84, 0x93, 0xa2, 0xb1,
6831 0xc0, 0xcf, 0xde, 0xed, 0xfc, 0x0b, 0x1a, 0x29,
6832 0x38, 0x47, 0x56, 0x65, 0x74, 0x83, 0x92, 0xa1,
6833 0xb0, 0xbf, 0xce, 0xdd, 0xec, 0xfb, 0x0a, 0x19,
6834 0x28, 0x37, 0x46, 0x55, 0x64, 0x73, 0x82, 0x91,
6835 0xa0, 0xaf, 0xbe, 0xcd, 0xdc, 0xeb, 0xfa, 0x09,
6836 0x18, 0x27, 0x36, 0x45, 0x54, 0x63, 0x72, 0x81,
6837 0x90, 0x9f, 0xae, 0xbd, 0xcc, 0xdb, 0xea, 0xf9,
6838 0x08, 0x17, 0x26, 0x35, 0x44, 0x53, 0x62, 0x71,
6839 0x80, 0x8f, 0x9e, 0xad, 0xbc, 0xcb, 0xda, 0xe9,
6840 0xf8, 0x07, 0x16, 0x25, 0x34, 0x43, 0x52, 0x61,
6841 0x70, 0x7f, 0x8e, 0x9d, 0xac, 0xbb, 0xca, 0xd9,
6842 0xe8, 0xf7, 0x06, 0x15, 0x24, 0x33, 0x42, 0x51,
6843 0x60, 0x6f, 0x7e, 0x8d, 0x9c, 0xab, 0xba, 0xc9,
6844 0xd8, 0xe7, 0xf6, 0x05, 0x14, 0x23, 0x32, 0x41,
6845 0x50, 0x5f, 0x6e, 0x7d, 0x8c, 0x9b, 0xaa, 0xb9,
6846 0xc8, 0xd7, 0xe6, 0xf5, 0x04, 0x13, 0x22, 0x31,
6847 0x40, 0x4f, 0x5e, 0x6d, 0x7c, 0x8b, 0x9a, 0xa9,
6848 0xb8, 0xc7, 0xd6, 0xe5, 0xf4, 0x03, 0x12, 0x21,
6849 0x30, 0x3f, 0x4e, 0x5d, 0x6c, 0x7b, 0x8a, 0x99,
6850 0xa8, 0xb7, 0xc6, 0xd5, 0xe4, 0xf3, 0x02, 0x11,
6851 0x20, 0x2f, 0x3e, 0x4d, 0x5c, 0x6b, 0x7a, 0x89,
6852 0x98, 0xa7, 0xb6, 0xc5, 0xd4, 0xe3, 0xf2, 0x01,
6853 0x10, 0x1f, 0x2e, 0x3d, 0x4c, 0x5b, 0x6a, 0x79,
6854 0x88, 0x97, 0xa6, 0xb5, 0xc4, 0xd3, 0xe2, 0xf1,
6855 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77,
6856 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff,
6857 0x10, 0x21, 0x32, 0x43, 0x54, 0x65, 0x76, 0x87,
6858 0x98, 0xa9, 0xba, 0xcb, 0xdc, 0xed, 0xfe, 0x0f,
6859 0x20, 0x31, 0x42, 0x53, 0x64, 0x75, 0x86, 0x97,
6860 0xa8, 0xb9, 0xca, 0xdb, 0xec, 0xfd, 0x0e, 0x1f,
6861 0x30, 0x41, 0x52, 0x63, 0x74, 0x85, 0x96, 0xa7,
6862 0xb8, 0xc9, 0xda, 0xeb, 0xfc, 0x0d, 0x1e, 0x2f,
6863 0x40, 0x51, 0x62, 0x73, 0x84, 0x95, 0xa6, 0xb7,
6864 0xc8, 0xd9, 0xea, 0xfb, 0x0c, 0x1d, 0x2e, 0x3f,
6865 0x50, 0x61, 0x72, 0x83, 0x94, 0xa5, 0xb6, 0xc7,
6866 0xd8, 0xe9, 0xfa, 0x0b, 0x1c, 0x2d, 0x3e, 0x4f,
6867 0x60, 0x71, 0x82, 0x93, 0xa4, 0xb5, 0xc6, 0xd7,
6868 0xe8, 0xf9, 0x0a, 0x1b, 0x2c, 0x3d, 0x4e, 0x5f,
6869 0x70, 0x81, 0x92, 0xa3, 0xb4, 0xc5, 0xd6, 0xe7,
6870 0xf8, 0x09, 0x1a, 0x2b, 0x3c, 0x4d, 0x5e, 0x6f,
6871 0x80, 0x91, 0xa2, 0xb3, 0xc4, 0xd5, 0xe6, 0xf7,
6872 0x08, 0x19, 0x2a, 0x3b, 0x4c, 0x5d, 0x6e, 0x7f,
6873 0x90, 0xa1, 0xb2, 0xc3, 0xd4, 0xe5, 0xf6, 0x07,
6874 0x18, 0x29, 0x3a, 0x4b, 0x5c, 0x6d, 0x7e, 0x8f,
6875 0xa0, 0xb1, 0xc2, 0xd3, 0xe4, 0xf5, 0x06, 0x17,
6876 0x28, 0x39, 0x4a, 0x5b, 0x6c, 0x7d, 0x8e, 0x9f,
6877 0xb0, 0xc1, 0xd2, 0xe3, 0xf4, 0x05, 0x16, 0x27,
6878 0x38, 0x49, 0x5a, 0x6b, 0x7c, 0x8d, 0x9e, 0xaf,
6879 0xc0, 0xd1, 0xe2, 0xf3, 0x04, 0x15, 0x26, 0x37,
6880 0x48, 0x59, 0x6a, 0x7b, 0x8c, 0x9d, 0xae, 0xbf,
6881 0xd0, 0xe1, 0xf2, 0x03, 0x14, 0x25, 0x36, 0x47,
6882 0x58, 0x69, 0x7a, 0x8b, 0x9c, 0xad, 0xbe, 0xcf,
6883 0xe0, 0xf1, 0x02, 0x13, 0x24, 0x35, 0x46, 0x57,
6884 0x68, 0x79, 0x8a, 0x9b, 0xac, 0xbd, 0xce, 0xdf,
6885 0xf0, 0x01, 0x12, 0x23, 0x34, 0x45, 0x56, 0x67,
6886 0x78, 0x89, 0x9a, 0xab, 0xbc, 0xcd, 0xde, 0xef,
6887 0x00, 0x13, 0x26, 0x39, 0x4c, 0x5f, 0x72, 0x85,
6888 0x98, 0xab, 0xbe, 0xd1, 0xe4, 0xf7, 0x0a, 0x1d,
6889 0x30, 0x43, 0x56, 0x69, 0x7c, 0x8f, 0xa2, 0xb5,
6890 0xc8, 0xdb, 0xee, 0x01, 0x14, 0x27, 0x3a, 0x4d,
6891 0x60, 0x73, 0x86, 0x99, 0xac, 0xbf, 0xd2, 0xe5,
6892 0xf8, 0x0b, 0x1e, 0x31, 0x44, 0x57, 0x6a, 0x7d,
6893 0x90, 0xa3, 0xb6, 0xc9, 0xdc, 0xef, 0x02, 0x15,
6894 0x28, 0x3b, 0x4e, 0x61, 0x74, 0x87, 0x9a, 0xad,
6895 0xc0, 0xd3, 0xe6, 0xf9, 0x0c, 0x1f, 0x32, 0x45,
6896 0x58, 0x6b, 0x7e, 0x91, 0xa4, 0xb7, 0xca, 0xdd,
6897 0xf0, 0x03, 0x16, 0x29, 0x3c, 0x4f, 0x62, 0x75,
6898 0x88, 0x9b, 0xae, 0xc1, 0xd4, 0xe7, 0xfa, 0x0d,
6899 0x20, 0x33, 0x46, 0x59, 0x6c, 0x7f, 0x92, 0xa5,
6900 0xb8, 0xcb, 0xde, 0xf1, 0x04, 0x17, 0x2a, 0x3d,
6901 0x50, 0x63, 0x76, 0x89, 0x9c, 0xaf, 0xc2, 0xd5,
6902 0xe8, 0xfb, 0x0e, 0x21, 0x34, 0x47, 0x5a, 0x6d,
6903 0x80, 0x93, 0xa6, 0xb9, 0xcc, 0xdf, 0xf2, 0x05,
6904 0x18, 0x2b, 0x3e, 0x51, 0x64, 0x77, 0x8a, 0x9d,
6905 0xb0, 0xc3, 0xd6, 0xe9, 0xfc, 0x0f, 0x22, 0x35,
6906 0x48, 0x5b, 0x6e, 0x81, 0x94, 0xa7, 0xba, 0xcd,
6907 0xe0, 0xf3, 0x06, 0x19, 0x2c, 0x3f, 0x52, 0x65,
6908 0x78, 0x8b, 0x9e, 0xb1, 0xc4, 0xd7, 0xea, 0xfd,
6909 0x10, 0x23, 0x36, 0x49, 0x5c, 0x6f, 0x82, 0x95,
6910 0xa8, 0xbb, 0xce, 0xe1, 0xf4, 0x07, 0x1a, 0x2d,
6911 0x40, 0x53, 0x66, 0x79, 0x8c, 0x9f, 0xb2, 0xc5,
6912 0xd8, 0xeb, 0xfe, 0x11, 0x24, 0x37, 0x4a, 0x5d,
6913 0x70, 0x83, 0x96, 0xa9, 0xbc, 0xcf, 0xe2, 0xf5,
6914 0x08, 0x1b, 0x2e, 0x41, 0x54, 0x67, 0x7a, 0x8d,
6915 0xa0, 0xb3, 0xc6, 0xd9, 0xec, 0xff, 0x12, 0x25,
6916 0x38, 0x4b, 0x5e, 0x71, 0x84, 0x97, 0xaa, 0xbd,
6917 0xd0, 0xe3, 0xf6, 0x09, 0x1c, 0x2f, 0x42, 0x55,
6918 0x68, 0x7b, 0x8e, 0xa1, 0xb4, 0xc7, 0xda, 0xed,
6919 0x00, 0x15, 0x2a, 0x3f, 0x54, 0x69, 0x7e, 0x93,
6920 0xa8, 0xbd, 0xd2, 0xe7, 0xfc, 0x11, 0x26, 0x3b,
6921 0x50, 0x65, 0x7a, 0x8f, 0xa4, 0xb9, 0xce, 0xe3,
6922 0xf8, 0x0d, 0x22, 0x37, 0x4c, 0x61, 0x76, 0x8b,
6923 0xa0, 0xb5, 0xca, 0xdf, 0xf4, 0x09, 0x1e, 0x33,
6924 0x48, 0x5d, 0x72, 0x87, 0x9c, 0xb1, 0xc6, 0xdb,
6925 0xf0, 0x05, 0x1a, 0x2f, 0x44, 0x59, 0x6e, 0x83,
6926 0x98, 0xad, 0xc2, 0xd7, 0xec, 0x01, 0x16, 0x2b,
6927 0x40, 0x55, 0x6a, 0x7f, 0x94, 0xa9, 0xbe, 0xd3,
6928 0xe8, 0xfd, 0x12, 0x27, 0x3c, 0x51, 0x66, 0x7b,
6929 0x90, 0xa5, 0xba, 0xcf, 0xe4, 0xf9, 0x0e, 0x23,
6930 0x38, 0x4d, 0x62, 0x77, 0x8c, 0xa1, 0xb6, 0xcb,
6931 0xe0, 0xf5, 0x0a, 0x1f, 0x34, 0x49, 0x5e, 0x73,
6932 0x88, 0x9d, 0xb2, 0xc7, 0xdc, 0xf1, 0x06, 0x1b,
6933 0x30, 0x45, 0x5a, 0x6f, 0x84, 0x99, 0xae, 0xc3,
6934 0xd8, 0xed, 0x02, 0x17, 0x2c, 0x41, 0x56, 0x6b,
6935 0x80, 0x95, 0xaa, 0xbf, 0xd4, 0xe9, 0xfe, 0x13,
6936 0x28, 0x3d, 0x52, 0x67, 0x7c, 0x91, 0xa6, 0xbb,
6937 0xd0, 0xe5, 0xfa, 0x0f, 0x24, 0x39, 0x4e, 0x63,
6938 0x78, 0x8d, 0xa2, 0xb7, 0xcc, 0xe1, 0xf6, 0x0b,
6939 0x20, 0x35, 0x4a, 0x5f, 0x74, 0x89, 0x9e, 0xb3,
6940 0xc8, 0xdd, 0xf2, 0x07, 0x1c, 0x31, 0x46, 0x5b,
6941 0x70, 0x85, 0x9a, 0xaf, 0xc4, 0xd9, 0xee, 0x03,
6942 0x18, 0x2d, 0x42, 0x57, 0x6c, 0x81, 0x96, 0xab,
6943 0xc0, 0xd5, 0xea, 0xff, 0x14, 0x29, 0x3e, 0x53,
6944 0x68, 0x7d, 0x92, 0xa7, 0xbc, 0xd1, 0xe6, 0xfb,
6945 0x10, 0x25, 0x3a, 0x4f, 0x64, 0x79, 0x8e, 0xa3,
6946 0xb8, 0xcd, 0xe2, 0xf7, 0x0c, 0x21, 0x36, 0x4b,
6947 0x60, 0x75, 0x8a, 0x9f, 0xb4, 0xc9, 0xde, 0xf3,
6948 0x08, 0x1d, 0x32, 0x47, 0x5c, 0x71, 0x86, 0x9b,
6949 0xb0, 0xc5, 0xda, 0xef, 0x04, 0x19, 0x2e, 0x43,
6950 0x58, 0x6d, 0x82, 0x97, 0xac, 0xc1, 0xd6, 0xeb,
6951 0x00, 0x17, 0x2e, 0x45, 0x5c, 0x73, 0x8a, 0xa1,
6952 0xb8, 0xcf, 0xe6, 0xfd, 0x14, 0x2b, 0x42, 0x59,
6953 0x70, 0x87, 0x9e, 0xb5, 0xcc, 0xe3, 0xfa, 0x11,
6954 0x28, 0x3f, 0x56, 0x6d, 0x84, 0x9b, 0xb2, 0xc9,
6955 0xe0, 0xf7, 0x0e, 0x25, 0x3c, 0x53, 0x6a, 0x81,
6956 0x98, 0xaf, 0xc6, 0xdd, 0xf4, 0x0b, 0x22, 0x39,
6957 0x50, 0x67, 0x7e, 0x95, 0xac, 0xc3, 0xda, 0xf1,
6958 0x08, 0x1f, 0x36, 0x4d, 0x64, 0x7b, 0x92, 0xa9,
6959 0xc0, 0xd7, 0xee, 0x05, 0x1c, 0x33, 0x4a, 0x61,
6960 0x78, 0x8f, 0xa6, 0xbd, 0xd4, 0xeb, 0x02, 0x19,
6961 0x30, 0x47, 0x5e, 0x75, 0x8c, 0xa3, 0xba, 0xd1,
6962 0xe8, 0xff, 0x16, 0x2d, 0x44, 0x5b, 0x72, 0x89,
6963 0xa0, 0xb7, 0xce, 0xe5, 0xfc, 0x13, 0x2a, 0x41,
6964 0x58, 0x6f, 0x86, 0x9d, 0xb4, 0xcb, 0xe2, 0xf9,
6965 0x10, 0x27, 0x3e, 0x55, 0x6c, 0x83, 0x9a, 0xb1,
6966 0xc8, 0xdf, 0xf6, 0x0d, 0x24, 0x3b, 0x52, 0x69,
6967 0x80, 0x97, 0xae, 0xc5, 0xdc, 0xf3, 0x0a, 0x21,
6968 0x38, 0x4f, 0x66, 0x7d, 0x94, 0xab, 0xc2, 0xd9,
6969 0xf0, 0x07, 0x1e, 0x35, 0x4c, 0x63, 0x7a, 0x91,
6970 0xa8, 0xbf, 0xd6, 0xed, 0x04, 0x1b, 0x32, 0x49,
6971 0x60, 0x77, 0x8e, 0xa5, 0xbc, 0xd3, 0xea, 0x01,
6972 0x18, 0x2f, 0x46, 0x5d, 0x74, 0x8b, 0xa2, 0xb9,
6973 0xd0, 0xe7, 0xfe, 0x15, 0x2c, 0x43, 0x5a, 0x71,
6974 0x88, 0x9f, 0xb6, 0xcd, 0xe4, 0xfb, 0x12, 0x29,
6975 0x40, 0x57, 0x6e, 0x85, 0x9c, 0xb3, 0xca, 0xe1,
6976 0xf8, 0x0f, 0x26, 0x3d, 0x54, 0x6b, 0x82, 0x99,
6977 0xb0, 0xc7, 0xde, 0xf5, 0x0c, 0x23, 0x3a, 0x51,
6978 0x68, 0x7f, 0x96, 0xad, 0xc4, 0xdb, 0xf2, 0x09,
6979 0x20, 0x37, 0x4e, 0x65, 0x7c, 0x93, 0xaa, 0xc1,
6980 0xd8, 0xef, 0x06, 0x1d, 0x34, 0x4b, 0x62, 0x79,
6981 0x90, 0xa7, 0xbe, 0xd5, 0xec, 0x03, 0x1a, 0x31,
6982 0x48, 0x5f, 0x76, 0x8d, 0xa4, 0xbb, 0xd2, 0xe9,
6983 0x00, 0x19, 0x32, 0x4b, 0x64, 0x7d, 0x96, 0xaf,
6984 0xc8, 0xe1, 0xfa, 0x13, 0x2c, 0x45, 0x5e, 0x77,
6985 0x90, 0xa9, 0xc2, 0xdb, 0xf4, 0x0d, 0x26, 0x3f,
6986 0x58, 0x71, 0x8a, 0xa3, 0xbc, 0xd5, 0xee, 0x07,
6987 0x20, 0x39, 0x52, 0x6b, 0x84, 0x9d, 0xb6, 0xcf,
6988 0xe8, 0x01, 0x1a, 0x33, 0x4c, 0x65, 0x7e, 0x97,
6989 0xb0, 0xc9, 0xe2, 0xfb, 0x14, 0x2d, 0x46, 0x5f,
6990 0x78, 0x91, 0xaa, 0xc3, 0xdc, 0xf5, 0x0e, 0x27,
6991 0x40, 0x59, 0x72, 0x8b, 0xa4, 0xbd, 0xd6, 0xef,
6992 0x08, 0x21, 0x3a, 0x53, 0x6c, 0x85, 0x9e, 0xb7,
6993 0xd0, 0xe9, 0x02, 0x1b, 0x34, 0x4d, 0x66, 0x7f,
6994 0x98, 0xb1, 0xca, 0xe3, 0xfc, 0x15, 0x2e, 0x47,
6995 0x60, 0x79, 0x92, 0xab, 0xc4, 0xdd, 0xf6, 0x0f,
6996 0x28, 0x41, 0x5a, 0x73, 0x8c, 0xa5, 0xbe, 0xd7,
6997 0xf0, 0x09, 0x22, 0x3b, 0x54, 0x6d, 0x86, 0x9f,
6998 0xb8, 0xd1, 0xea, 0x03, 0x1c, 0x35, 0x4e, 0x67,
6999 0x80, 0x99, 0xb2, 0xcb, 0xe4, 0xfd, 0x16, 0x2f,
7000 0x48, 0x61, 0x7a, 0x93, 0xac, 0xc5, 0xde, 0xf7,
7001 0x10, 0x29, 0x42, 0x5b, 0x74, 0x8d, 0xa6, 0xbf,
7002 0xd8, 0xf1, 0x0a, 0x23, 0x3c, 0x55, 0x6e, 0x87,
7003 0xa0, 0xb9, 0xd2, 0xeb, 0x04, 0x1d, 0x36, 0x4f,
7004 0x68, 0x81, 0x9a, 0xb3, 0xcc, 0xe5, 0xfe, 0x17,
7005 0x30, 0x49, 0x62, 0x7b, 0x94, 0xad, 0xc6, 0xdf,
7006 0xf8, 0x11, 0x2a, 0x43, 0x5c, 0x75, 0x8e, 0xa7,
7007 0xc0, 0xd9, 0xf2, 0x0b, 0x24, 0x3d, 0x56, 0x6f,
7008 0x88, 0xa1, 0xba, 0xd3, 0xec, 0x05, 0x1e, 0x37,
7009 0x50, 0x69, 0x82, 0x9b, 0xb4, 0xcd, 0xe6, 0xff,
7010 0x18, 0x31, 0x4a, 0x63, 0x7c, 0x95, 0xae, 0xc7,
7011 0xe0, 0xf9, 0x12, 0x2b, 0x44, 0x5d, 0x76, 0x8f,
7012 0xa8, 0xc1, 0xda, 0xf3, 0x0c, 0x25, 0x3e, 0x57,
7013 0x70, 0x89, 0xa2, 0xbb, 0xd4, 0xed, 0x06, 0x1f,
7014 0x38, 0x51, 0x6a, 0x83, 0x9c, 0xb5, 0xce, 0xe7,
7015 0x00, 0x1b, 0x36, 0x51, 0x6c, 0x87, 0xa2, 0xbd,
7016 0xd8, 0xf3, 0x0e, 0x29, 0x44, 0x5f, 0x7a, 0x95,
7017 0xb0, 0xcb, 0xe6, 0x01, 0x1c, 0x37, 0x52, 0x6d,
7018 0x88, 0xa3, 0xbe, 0xd9, 0xf4, 0x0f, 0x2a, 0x45,
7019 0x60, 0x7b, 0x96, 0xb1, 0xcc, 0xe7, 0x02, 0x1d,
7020 0x38, 0x53, 0x6e, 0x89, 0xa4, 0xbf, 0xda, 0xf5,
7021 0x10, 0x2b, 0x46, 0x61, 0x7c, 0x97, 0xb2, 0xcd,
7022 0xe8, 0x03, 0x1e, 0x39, 0x54, 0x6f, 0x8a, 0xa5,
7023 0xc0, 0xdb, 0xf6, 0x11, 0x2c, 0x47, 0x62, 0x7d,
7024 0x98, 0xb3, 0xce, 0xe9, 0x04, 0x1f, 0x3a, 0x55,
7025 0x70, 0x8b, 0xa6, 0xc1, 0xdc, 0xf7, 0x12, 0x2d,
7026 0x48, 0x63, 0x7e, 0x99, 0xb4, 0xcf, 0xea, 0x05,
7027 0x20, 0x3b, 0x56, 0x71, 0x8c, 0xa7, 0xc2, 0xdd,
7028 0xf8, 0x13, 0x2e, 0x49, 0x64, 0x7f, 0x9a, 0xb5,
7029 0xd0, 0xeb, 0x06, 0x21, 0x3c, 0x57, 0x72, 0x8d,
7030 0xa8, 0xc3, 0xde, 0xf9, 0x14, 0x2f, 0x4a, 0x65,
7031 0x80, 0x9b, 0xb6, 0xd1, 0xec, 0x07, 0x22, 0x3d,
7032 0x58, 0x73, 0x8e, 0xa9, 0xc4, 0xdf, 0xfa, 0x15,
7033 0x30, 0x4b, 0x66, 0x81, 0x9c, 0xb7, 0xd2, 0xed,
7034 0x08, 0x23, 0x3e, 0x59, 0x74, 0x8f, 0xaa, 0xc5,
7035 0xe0, 0xfb, 0x16, 0x31, 0x4c, 0x67, 0x82, 0x9d,
7036 0xb8, 0xd3, 0xee, 0x09, 0x24, 0x3f, 0x5a, 0x75,
7037 0x90, 0xab, 0xc6, 0xe1, 0xfc, 0x17, 0x32, 0x4d,
7038 0x68, 0x83, 0x9e, 0xb9, 0xd4, 0xef, 0x0a, 0x25,
7039 0x40, 0x5b, 0x76, 0x91, 0xac, 0xc7, 0xe2, 0xfd,
7040 0x18, 0x33, 0x4e, 0x69, 0x84, 0x9f, 0xba, 0xd5,
7041 0xf0, 0x0b, 0x26, 0x41, 0x5c, 0x77, 0x92, 0xad,
7042 0xc8, 0xe3, 0xfe, 0x19, 0x34, 0x4f, 0x6a, 0x85,
7043 0xa0, 0xbb, 0xd6, 0xf1, 0x0c, 0x27, 0x42, 0x5d,
7044 0x78, 0x93, 0xae, 0xc9, 0xe4, 0xff, 0x1a, 0x35,
7045 0x50, 0x6b, 0x86, 0xa1, 0xbc, 0xd7, 0xf2, 0x0d,
7046 0x28, 0x43, 0x5e, 0x79, 0x94, 0xaf, 0xca, 0xe5,
7047 0x00, 0x1d, 0x3a, 0x57, 0x74, 0x91, 0xae, 0xcb,
7048 0xe8, 0x05, 0x22, 0x3f, 0x5c, 0x79, 0x96, 0xb3,
7049 0xd0, 0xed, 0x0a, 0x27, 0x44, 0x61, 0x7e, 0x9b,
7050 0xb8, 0xd5, 0xf2, 0x0f, 0x2c, 0x49, 0x66, 0x83,
7051 0xa0, 0xbd, 0xda, 0xf7, 0x14, 0x31, 0x4e, 0x6b,
7052 0x88, 0xa5, 0xc2, 0xdf, 0xfc, 0x19, 0x36, 0x53,
7053 0x70, 0x8d, 0xaa, 0xc7, 0xe4, 0x01, 0x1e, 0x3b,
7054 0x58, 0x75, 0x92, 0xaf, 0xcc, 0xe9, 0x06, 0x23,
7055 0x40, 0x5d, 0x7a, 0x97, 0xb4, 0xd1, 0xee, 0x0b,
7056 0x28, 0x45, 0x62, 0x7f, 0x9c, 0xb9, 0xd6, 0xf3,
7057 0x10, 0x2d, 0x4a, 0x67, 0x84, 0xa1, 0xbe, 0xdb,
7058 0xf8, 0x15, 0x32, 0x4f, 0x6c, 0x89, 0xa6, 0xc3,
7059 0xe0, 0xfd, 0x1a, 0x37, 0x54, 0x71, 0x8e, 0xab,
7060 0xc8, 0xe5, 0x02, 0x1f, 0x3c, 0x59, 0x76, 0x93,
7061 0xb0, 0xcd, 0xea, 0x07, 0x24, 0x41, 0x5e, 0x7b,
7062 0x98, 0xb5, 0xd2, 0xef, 0x0c, 0x29, 0x46, 0x63,
7063 0x80, 0x9d, 0xba, 0xd7, 0xf4, 0x11, 0x2e, 0x4b,
7064 0x68, 0x85, 0xa2, 0xbf, 0xdc, 0xf9, 0x16, 0x33,
7065 0x50, 0x6d, 0x8a, 0xa7, 0xc4, 0xe1, 0xfe, 0x1b,
7066 0x38, 0x55, 0x72, 0x8f, 0xac, 0xc9, 0xe6, 0x03,
7067 0x20, 0x3d, 0x5a, 0x77, 0x94, 0xb1, 0xce, 0xeb,
7068 0x08, 0x25, 0x42, 0x5f, 0x7c, 0x99, 0xb6, 0xd3,
7069 0xf0, 0x0d, 0x2a, 0x47, 0x64, 0x81, 0x9e, 0xbb,
7070 0xd8, 0xf5, 0x12, 0x2f, 0x4c, 0x69, 0x86, 0xa3,
7071 0xc0, 0xdd, 0xfa, 0x17, 0x34, 0x51, 0x6e, 0x8b,
7072 0xa8, 0xc5, 0xe2, 0xff, 0x1c, 0x39, 0x56, 0x73,
7073 0x90, 0xad, 0xca, 0xe7, 0x04, 0x21, 0x3e, 0x5b,
7074 0x78, 0x95, 0xb2, 0xcf, 0xec, 0x09, 0x26, 0x43,
7075 0x60, 0x7d, 0x9a, 0xb7, 0xd4, 0xf1, 0x0e, 0x2b,
7076 0x48, 0x65, 0x82, 0x9f, 0xbc, 0xd9, 0xf6, 0x13,
7077 0x30, 0x4d, 0x6a, 0x87, 0xa4, 0xc1, 0xde, 0xfb,
7078 0x18, 0x35, 0x52, 0x6f, 0x8c, 0xa9, 0xc6, 0xe3,
7079 0x00, 0x1f, 0x3e, 0x5d, 0x7c, 0x9b, 0xba, 0xd9,
7080 0xf8, 0x17, 0x36, 0x55, 0x74, 0x93, 0xb2, 0xd1,
7081 0xf0, 0x0f, 0x2e, 0x4d, 0x6c, 0x8b, 0xaa, 0xc9,
7082 0xe8, 0x07, 0x26, 0x45, 0x64, 0x83, 0xa2, 0xc1,
7083 0xe0, 0xff, 0x1e, 0x3d, 0x5c, 0x7b, 0x9a, 0xb9,
7084 0xd8, 0xf7, 0x16, 0x35, 0x54, 0x73, 0x92, 0xb1,
7085 0xd0, 0xef, 0x0e, 0x2d, 0x4c, 0x6b, 0x8a, 0xa9,
7086 0xc8, 0xe7, 0x06, 0x25, 0x44, 0x63, 0x82, 0xa1,
7087 0xc0, 0xdf, 0xfe, 0x1d, 0x3c, 0x5b, 0x7a, 0x99,
7088 0xb8, 0xd7, 0xf6, 0x15, 0x34, 0x53, 0x72, 0x91,
7089 0xb0, 0xcf, 0xee, 0x0d, 0x2c, 0x4b, 0x6a, 0x89,
7090 0xa8, 0xc7, 0xe6, 0x05, 0x24, 0x43, 0x62, 0x81,
7091 0xa0, 0xbf, 0xde, 0xfd, 0x1c, 0x3b, 0x5a, 0x79,
7092 0x98, 0xb7, 0xd6, 0xf5, 0x14, 0x33, 0x52, 0x71,
7093 0x90, 0xaf, 0xce, 0xed, 0x0c, 0x2b, 0x4a, 0x69,
7094 0x88, 0xa7, 0xc6, 0xe5, 0x04, 0x23, 0x42, 0x61,
7095 0x80, 0x9f, 0xbe, 0xdd, 0xfc, 0x1b, 0x3a, 0x59,
7096 0x78, 0x97, 0xb6, 0xd5, 0xf4, 0x13, 0x32, 0x51,
7097 0x70, 0x8f, 0xae, 0xcd, 0xec, 0x0b, 0x2a, 0x49,
7098 0x68, 0x87, 0xa6, 0xc5, 0xe4, 0x03, 0x22, 0x41,
7099 0x60, 0x7f, 0x9e, 0xbd, 0xdc, 0xfb, 0x1a, 0x39,
7100 0x58, 0x77, 0x96, 0xb5, 0xd4, 0xf3, 0x12, 0x31,
7101 0x50, 0x6f, 0x8e, 0xad, 0xcc, 0xeb, 0x0a, 0x29,
7102 0x48, 0x67, 0x86, 0xa5, 0xc4, 0xe3, 0x02, 0x21,
7103 0x40, 0x5f, 0x7e, 0x9d, 0xbc, 0xdb, 0xfa, 0x19,
7104 0x38, 0x57, 0x76, 0x95, 0xb4, 0xd3, 0xf2, 0x11,
7105 0x30, 0x4f, 0x6e, 0x8d, 0xac, 0xcb, 0xea, 0x09,
7106 0x28, 0x47, 0x66, 0x85, 0xa4, 0xc3, 0xe2, 0x01,
7107 0x20, 0x3f, 0x5e, 0x7d, 0x9c, 0xbb, 0xda, 0xf9,
7108 0x18, 0x37, 0x56, 0x75, 0x94, 0xb3, 0xd2, 0xf1,
7109 0x10, 0x2f, 0x4e, 0x6d, 0x8c, 0xab, 0xca, 0xe9,
7110 0x08, 0x27, 0x46, 0x65, 0x84, 0xa3, 0xc2, 0xe1,
7111 0x00, 0x21, 0x42, 0x63,
7112 },
7113 .ilen = 4100,
7114 .result = {
7115 0xb5, 0x81, 0xf5, 0x64, 0x18, 0x73, 0xe3, 0xf0,
7116 0x4c, 0x13, 0xf2, 0x77, 0x18, 0x60, 0x65, 0x5e,
7117 0x29, 0x01, 0xce, 0x98, 0x55, 0x53, 0xf9, 0x0c,
7118 0x2a, 0x08, 0xd5, 0x09, 0xb3, 0x57, 0x55, 0x56,
7119 0xc5, 0xe9, 0x56, 0x90, 0xcb, 0x6a, 0xa3, 0xc0,
7120 0xff, 0xc4, 0x79, 0xb4, 0xd2, 0x97, 0x5d, 0xc4,
7121 0x43, 0xd1, 0xfe, 0x94, 0x7b, 0x88, 0x06, 0x5a,
7122 0xb2, 0x9e, 0x2c, 0xfc, 0x44, 0x03, 0xb7, 0x90,
7123 0xa0, 0xc1, 0xba, 0x6a, 0x33, 0xb8, 0xc7, 0xb2,
7124 0x9d, 0xe1, 0x12, 0x4f, 0xc0, 0x64, 0xd4, 0x01,
7125 0xfe, 0x8c, 0x7a, 0x66, 0xf7, 0xe6, 0x5a, 0x91,
7126 0xbb, 0xde, 0x56, 0x86, 0xab, 0x65, 0x21, 0x30,
7127 0x00, 0x84, 0x65, 0x24, 0xa5, 0x7d, 0x85, 0xb4,
7128 0xe3, 0x17, 0xed, 0x3a, 0xb7, 0x6f, 0xb4, 0x0b,
7129 0x0b, 0xaf, 0x15, 0xae, 0x5a, 0x8f, 0xf2, 0x0c,
7130 0x2f, 0x27, 0xf4, 0x09, 0xd8, 0xd2, 0x96, 0xb7,
7131 0x71, 0xf2, 0xc5, 0x99, 0x4d, 0x7e, 0x7f, 0x75,
7132 0x77, 0x89, 0x30, 0x8b, 0x59, 0xdb, 0xa2, 0xb2,
7133 0xa0, 0xf3, 0x19, 0x39, 0x2b, 0xc5, 0x7e, 0x3f,
7134 0x4f, 0xd9, 0xd3, 0x56, 0x28, 0x97, 0x44, 0xdc,
7135 0xc0, 0x8b, 0x77, 0x24, 0xd9, 0x52, 0xe7, 0xc5,
7136 0xaf, 0xf6, 0x7d, 0x59, 0xb2, 0x44, 0x05, 0x1d,
7137 0xb1, 0xb0, 0x11, 0xa5, 0x0f, 0xec, 0x33, 0xe1,
7138 0x6d, 0x1b, 0x4e, 0x1f, 0xff, 0x57, 0x91, 0xb4,
7139 0x5b, 0x9a, 0x96, 0xc5, 0x53, 0xbc, 0xae, 0x20,
7140 0x3c, 0xbb, 0x14, 0xe2, 0xe8, 0x22, 0x33, 0xc1,
7141 0x5e, 0x76, 0x9e, 0x46, 0x99, 0xf6, 0x2a, 0x15,
7142 0xc6, 0x97, 0x02, 0xa0, 0x66, 0x43, 0xd1, 0xa6,
7143 0x31, 0xa6, 0x9f, 0xfb, 0xf4, 0xd3, 0x69, 0xe5,
7144 0xcd, 0x76, 0x95, 0xb8, 0x7a, 0x82, 0x7f, 0x21,
7145 0x45, 0xff, 0x3f, 0xce, 0x55, 0xf6, 0x95, 0x10,
7146 0x08, 0x77, 0x10, 0x43, 0xc6, 0xf3, 0x09, 0xe5,
7147 0x68, 0xe7, 0x3c, 0xad, 0x00, 0x52, 0x45, 0x0d,
7148 0xfe, 0x2d, 0xc6, 0xc2, 0x94, 0x8c, 0x12, 0x1d,
7149 0xe6, 0x25, 0xae, 0x98, 0x12, 0x8e, 0x19, 0x9c,
7150 0x81, 0x68, 0xb1, 0x11, 0xf6, 0x69, 0xda, 0xe3,
7151 0x62, 0x08, 0x18, 0x7a, 0x25, 0x49, 0x28, 0xac,
7152 0xba, 0x71, 0x12, 0x0b, 0xe4, 0xa2, 0xe5, 0xc7,
7153 0x5d, 0x8e, 0xec, 0x49, 0x40, 0x21, 0xbf, 0x5a,
7154 0x98, 0xf3, 0x02, 0x68, 0x55, 0x03, 0x7f, 0x8a,
7155 0xe5, 0x94, 0x0c, 0x32, 0x5c, 0x07, 0x82, 0x63,
7156 0xaf, 0x6f, 0x91, 0x40, 0x84, 0x8e, 0x52, 0x25,
7157 0xd0, 0xb0, 0x29, 0x53, 0x05, 0xe2, 0x50, 0x7a,
7158 0x34, 0xeb, 0xc9, 0x46, 0x20, 0xa8, 0x3d, 0xde,
7159 0x7f, 0x16, 0x5f, 0x36, 0xc5, 0x2e, 0xdc, 0xd1,
7160 0x15, 0x47, 0xc7, 0x50, 0x40, 0x6d, 0x91, 0xc5,
7161 0xe7, 0x93, 0x95, 0x1a, 0xd3, 0x57, 0xbc, 0x52,
7162 0x33, 0xee, 0x14, 0x19, 0x22, 0x52, 0x89, 0xa7,
7163 0x4a, 0x25, 0x56, 0x77, 0x4b, 0xca, 0xcf, 0x0a,
7164 0xe1, 0xf5, 0x35, 0x85, 0x30, 0x7e, 0x59, 0x4a,
7165 0xbd, 0x14, 0x5b, 0xdf, 0xe3, 0x46, 0xcb, 0xac,
7166 0x1f, 0x6c, 0x96, 0x0e, 0xf4, 0x81, 0xd1, 0x99,
7167 0xca, 0x88, 0x63, 0x3d, 0x02, 0x58, 0x6b, 0xa9,
7168 0xe5, 0x9f, 0xb3, 0x00, 0xb2, 0x54, 0xc6, 0x74,
7169 0x1c, 0xbf, 0x46, 0xab, 0x97, 0xcc, 0xf8, 0x54,
7170 0x04, 0x07, 0x08, 0x52, 0xe6, 0xc0, 0xda, 0x93,
7171 0x74, 0x7d, 0x93, 0x99, 0x5d, 0x78, 0x68, 0xa6,
7172 0x2e, 0x6b, 0xd3, 0x6a, 0x69, 0xcc, 0x12, 0x6b,
7173 0xd4, 0xc7, 0xa5, 0xc6, 0xe7, 0xf6, 0x03, 0x04,
7174 0x5d, 0xcd, 0x61, 0x5e, 0x17, 0x40, 0xdc, 0xd1,
7175 0x5c, 0xf5, 0x08, 0xdf, 0x5c, 0x90, 0x85, 0xa4,
7176 0xaf, 0xf6, 0x78, 0xbb, 0x0d, 0xf1, 0xf4, 0xa4,
7177 0x54, 0x26, 0x72, 0x9e, 0x61, 0xfa, 0x86, 0xcf,
7178 0xe8, 0x9e, 0xa1, 0xe0, 0xc7, 0x48, 0x23, 0xae,
7179 0x5a, 0x90, 0xae, 0x75, 0x0a, 0x74, 0x18, 0x89,
7180 0x05, 0xb1, 0x92, 0xb2, 0x7f, 0xd0, 0x1b, 0xa6,
7181 0x62, 0x07, 0x25, 0x01, 0xc7, 0xc2, 0x4f, 0xf9,
7182 0xe8, 0xfe, 0x63, 0x95, 0x80, 0x07, 0xb4, 0x26,
7183 0xcc, 0xd1, 0x26, 0xb6, 0xc4, 0x3f, 0x9e, 0xcb,
7184 0x8e, 0x3b, 0x2e, 0x44, 0x16, 0xd3, 0x10, 0x9a,
7185 0x95, 0x08, 0xeb, 0xc8, 0xcb, 0xeb, 0xbf, 0x6f,
7186 0x0b, 0xcd, 0x1f, 0xc8, 0xca, 0x86, 0xaa, 0xec,
7187 0x33, 0xe6, 0x69, 0xf4, 0x45, 0x25, 0x86, 0x3a,
7188 0x22, 0x94, 0x4f, 0x00, 0x23, 0x6a, 0x44, 0xc2,
7189 0x49, 0x97, 0x33, 0xab, 0x36, 0x14, 0x0a, 0x70,
7190 0x24, 0xc3, 0xbe, 0x04, 0x3b, 0x79, 0xa0, 0xf9,
7191 0xb8, 0xe7, 0x76, 0x29, 0x22, 0x83, 0xd7, 0xf2,
7192 0x94, 0xf4, 0x41, 0x49, 0xba, 0x5f, 0x7b, 0x07,
7193 0xb5, 0xfb, 0xdb, 0x03, 0x1a, 0x9f, 0xb6, 0x4c,
7194 0xc2, 0x2e, 0x37, 0x40, 0x49, 0xc3, 0x38, 0x16,
7195 0xe2, 0x4f, 0x77, 0x82, 0xb0, 0x68, 0x4c, 0x71,
7196 0x1d, 0x57, 0x61, 0x9c, 0xd9, 0x4e, 0x54, 0x99,
7197 0x47, 0x13, 0x28, 0x73, 0x3c, 0xbb, 0x00, 0x90,
7198 0xf3, 0x4d, 0xc9, 0x0e, 0xfd, 0xe7, 0xb1, 0x71,
7199 0xd3, 0x15, 0x79, 0xbf, 0xcc, 0x26, 0x2f, 0xbd,
7200 0xad, 0x6c, 0x50, 0x69, 0x6c, 0x3e, 0x6d, 0x80,
7201 0x9a, 0xea, 0x78, 0xaf, 0x19, 0xb2, 0x0d, 0x4d,
7202 0xad, 0x04, 0x07, 0xae, 0x22, 0x90, 0x4a, 0x93,
7203 0x32, 0x0e, 0x36, 0x9b, 0x1b, 0x46, 0xba, 0x3b,
7204 0xb4, 0xac, 0xc6, 0xd1, 0xa2, 0x31, 0x53, 0x3b,
7205 0x2a, 0x3d, 0x45, 0xfe, 0x03, 0x61, 0x10, 0x85,
7206 0x17, 0x69, 0xa6, 0x78, 0xcc, 0x6c, 0x87, 0x49,
7207 0x53, 0xf9, 0x80, 0x10, 0xde, 0x80, 0xa2, 0x41,
7208 0x6a, 0xc3, 0x32, 0x02, 0xad, 0x6d, 0x3c, 0x56,
7209 0x00, 0x71, 0x51, 0x06, 0xa7, 0xbd, 0xfb, 0xef,
7210 0x3c, 0xb5, 0x9f, 0xfc, 0x48, 0x7d, 0x53, 0x7c,
7211 0x66, 0xb0, 0x49, 0x23, 0xc4, 0x47, 0x10, 0x0e,
7212 0xe5, 0x6c, 0x74, 0x13, 0xe6, 0xc5, 0x3f, 0xaa,
7213 0xde, 0xff, 0x07, 0x44, 0xdd, 0x56, 0x1b, 0xad,
7214 0x09, 0x77, 0xfb, 0x5b, 0x12, 0xb8, 0x0d, 0x38,
7215 0x17, 0x37, 0x35, 0x7b, 0x9b, 0xbc, 0xfe, 0xd4,
7216 0x7e, 0x8b, 0xda, 0x7e, 0x5b, 0x04, 0xa7, 0x22,
7217 0xa7, 0x31, 0xa1, 0x20, 0x86, 0xc7, 0x1b, 0x99,
7218 0xdb, 0xd1, 0x89, 0xf4, 0x94, 0xa3, 0x53, 0x69,
7219 0x8d, 0xe7, 0xe8, 0x74, 0x11, 0x8d, 0x74, 0xd6,
7220 0x07, 0x37, 0x91, 0x9f, 0xfd, 0x67, 0x50, 0x3a,
7221 0xc9, 0xe1, 0xf4, 0x36, 0xd5, 0xa0, 0x47, 0xd1,
7222 0xf9, 0xe5, 0x39, 0xa3, 0x31, 0xac, 0x07, 0x36,
7223 0x23, 0xf8, 0x66, 0x18, 0x14, 0x28, 0x34, 0x0f,
7224 0xb8, 0xd0, 0xe7, 0x29, 0xb3, 0x04, 0x4b, 0x55,
7225 0x01, 0x41, 0xb2, 0x75, 0x8d, 0xcb, 0x96, 0x85,
7226 0x3a, 0xfb, 0xab, 0x2b, 0x9e, 0xfa, 0x58, 0x20,
7227 0x44, 0x1f, 0xc0, 0x14, 0x22, 0x75, 0x61, 0xe8,
7228 0xaa, 0x19, 0xcf, 0xf1, 0x82, 0x56, 0xf4, 0xd7,
7229 0x78, 0x7b, 0x3d, 0x5f, 0xb3, 0x9e, 0x0b, 0x8a,
7230 0x57, 0x50, 0xdb, 0x17, 0x41, 0x65, 0x4d, 0xa3,
7231 0x02, 0xc9, 0x9c, 0x9c, 0x53, 0xfb, 0x39, 0x39,
7232 0x9b, 0x1d, 0x72, 0x24, 0xda, 0xb7, 0x39, 0xbe,
7233 0x13, 0x3b, 0xfa, 0x29, 0xda, 0x9e, 0x54, 0x64,
7234 0x6e, 0xba, 0xd8, 0xa1, 0xcb, 0xb3, 0x36, 0xfa,
7235 0xcb, 0x47, 0x85, 0xe9, 0x61, 0x38, 0xbc, 0xbe,
7236 0xc5, 0x00, 0x38, 0x2a, 0x54, 0xf7, 0xc4, 0xb9,
7237 0xb3, 0xd3, 0x7b, 0xa0, 0xa0, 0xf8, 0x72, 0x7f,
7238 0x8c, 0x8e, 0x82, 0x0e, 0xc6, 0x1c, 0x75, 0x9d,
7239 0xca, 0x8e, 0x61, 0x87, 0xde, 0xad, 0x80, 0xd2,
7240 0xf5, 0xf9, 0x80, 0xef, 0x15, 0x75, 0xaf, 0xf5,
7241 0x80, 0xfb, 0xff, 0x6d, 0x1e, 0x25, 0xb7, 0x40,
7242 0x61, 0x6a, 0x39, 0x5a, 0x6a, 0xb5, 0x31, 0xab,
7243 0x97, 0x8a, 0x19, 0x89, 0x44, 0x40, 0xc0, 0xa6,
7244 0xb4, 0x4e, 0x30, 0x32, 0x7b, 0x13, 0xe7, 0x67,
7245 0xa9, 0x8b, 0x57, 0x04, 0xc2, 0x01, 0xa6, 0xf4,
7246 0x28, 0x99, 0xad, 0x2c, 0x76, 0xa3, 0x78, 0xc2,
7247 0x4a, 0xe6, 0xca, 0x5c, 0x50, 0x6a, 0xc1, 0xb0,
7248 0x62, 0x4b, 0x10, 0x8e, 0x7c, 0x17, 0x43, 0xb3,
7249 0x17, 0x66, 0x1c, 0x3e, 0x8d, 0x69, 0xf0, 0x5a,
7250 0x71, 0xf5, 0x97, 0xdc, 0xd1, 0x45, 0xdd, 0x28,
7251 0xf3, 0x5d, 0xdf, 0x53, 0x7b, 0x11, 0xe5, 0xbc,
7252 0x4c, 0xdb, 0x1b, 0x51, 0x6b, 0xe9, 0xfb, 0x3d,
7253 0xc1, 0xc3, 0x2c, 0xb9, 0x71, 0xf5, 0xb6, 0xb2,
7254 0x13, 0x36, 0x79, 0x80, 0x53, 0xe8, 0xd3, 0xa6,
7255 0x0a, 0xaf, 0xfd, 0x56, 0x97, 0xf7, 0x40, 0x8e,
7256 0x45, 0xce, 0xf8, 0xb0, 0x9e, 0x5c, 0x33, 0x82,
7257 0xb0, 0x44, 0x56, 0xfc, 0x05, 0x09, 0xe9, 0x2a,
7258 0xac, 0x26, 0x80, 0x14, 0x1d, 0xc8, 0x3a, 0x35,
7259 0x4c, 0x82, 0x97, 0xfd, 0x76, 0xb7, 0xa9, 0x0a,
7260 0x35, 0x58, 0x79, 0x8e, 0x0f, 0x66, 0xea, 0xaf,
7261 0x51, 0x6c, 0x09, 0xa9, 0x6e, 0x9b, 0xcb, 0x9a,
7262 0x31, 0x47, 0xa0, 0x2f, 0x7c, 0x71, 0xb4, 0x4a,
7263 0x11, 0xaa, 0x8c, 0x66, 0xc5, 0x64, 0xe6, 0x3a,
7264 0x54, 0xda, 0x24, 0x6a, 0xc4, 0x41, 0x65, 0x46,
7265 0x82, 0xa0, 0x0a, 0x0f, 0x5f, 0xfb, 0x25, 0xd0,
7266 0x2c, 0x91, 0xa7, 0xee, 0xc4, 0x81, 0x07, 0x86,
7267 0x75, 0x5e, 0x33, 0x69, 0x97, 0xe4, 0x2c, 0xa8,
7268 0x9d, 0x9f, 0x0b, 0x6a, 0xbe, 0xad, 0x98, 0xda,
7269 0x6d, 0x94, 0x41, 0xda, 0x2c, 0x1e, 0x89, 0xc4,
7270 0xc2, 0xaf, 0x1e, 0x00, 0x05, 0x0b, 0x83, 0x60,
7271 0xbd, 0x43, 0xea, 0x15, 0x23, 0x7f, 0xb9, 0xac,
7272 0xee, 0x4f, 0x2c, 0xaf, 0x2a, 0xf3, 0xdf, 0xd0,
7273 0xf3, 0x19, 0x31, 0xbb, 0x4a, 0x74, 0x84, 0x17,
7274 0x52, 0x32, 0x2c, 0x7d, 0x61, 0xe4, 0xcb, 0xeb,
7275 0x80, 0x38, 0x15, 0x52, 0xcb, 0x6f, 0xea, 0xe5,
7276 0x73, 0x9c, 0xd9, 0x24, 0x69, 0xc6, 0x95, 0x32,
7277 0x21, 0xc8, 0x11, 0xe4, 0xdc, 0x36, 0xd7, 0x93,
7278 0x38, 0x66, 0xfb, 0xb2, 0x7f, 0x3a, 0xb9, 0xaf,
7279 0x31, 0xdd, 0x93, 0x75, 0x78, 0x8a, 0x2c, 0x94,
7280 0x87, 0x1a, 0x58, 0xec, 0x9e, 0x7d, 0x4d, 0xba,
7281 0xe1, 0xe5, 0x4d, 0xfc, 0xbc, 0xa4, 0x2a, 0x14,
7282 0xef, 0xcc, 0xa7, 0xec, 0xab, 0x43, 0x09, 0x18,
7283 0xd3, 0xab, 0x68, 0xd1, 0x07, 0x99, 0x44, 0x47,
7284 0xd6, 0x83, 0x85, 0x3b, 0x30, 0xea, 0xa9, 0x6b,
7285 0x63, 0xea, 0xc4, 0x07, 0xfb, 0x43, 0x2f, 0xa4,
7286 0xaa, 0xb0, 0xab, 0x03, 0x89, 0xce, 0x3f, 0x8c,
7287 0x02, 0x7c, 0x86, 0x54, 0xbc, 0x88, 0xaf, 0x75,
7288 0xd2, 0xdc, 0x63, 0x17, 0xd3, 0x26, 0xf6, 0x96,
7289 0xa9, 0x3c, 0xf1, 0x61, 0x8c, 0x11, 0x18, 0xcc,
7290 0xd6, 0xea, 0x5b, 0xe2, 0xcd, 0xf0, 0xf1, 0xb2,
7291 0xe5, 0x35, 0x90, 0x1f, 0x85, 0x4c, 0x76, 0x5b,
7292 0x66, 0xce, 0x44, 0xa4, 0x32, 0x9f, 0xe6, 0x7b,
7293 0x71, 0x6e, 0x9f, 0x58, 0x15, 0x67, 0x72, 0x87,
7294 0x64, 0x8e, 0x3a, 0x44, 0x45, 0xd4, 0x76, 0xfa,
7295 0xc2, 0xf6, 0xef, 0x85, 0x05, 0x18, 0x7a, 0x9b,
7296 0xba, 0x41, 0x54, 0xac, 0xf0, 0xfc, 0x59, 0x12,
7297 0x3f, 0xdf, 0xa0, 0xe5, 0x8a, 0x65, 0xfd, 0x3a,
7298 0x62, 0x8d, 0x83, 0x2c, 0x03, 0xbe, 0x05, 0x76,
7299 0x2e, 0x53, 0x49, 0x97, 0x94, 0x33, 0xae, 0x40,
7300 0x81, 0x15, 0xdb, 0x6e, 0xad, 0xaa, 0xf5, 0x4b,
7301 0xe3, 0x98, 0x70, 0xdf, 0xe0, 0x7c, 0xcd, 0xdb,
7302 0x02, 0xd4, 0x7d, 0x2f, 0xc1, 0xe6, 0xb4, 0xf3,
7303 0xd7, 0x0d, 0x7a, 0xd9, 0x23, 0x9e, 0x87, 0x2d,
7304 0xce, 0x87, 0xad, 0xcc, 0x72, 0x05, 0x00, 0x29,
7305 0xdc, 0x73, 0x7f, 0x64, 0xc1, 0x15, 0x0e, 0xc2,
7306 0xdf, 0xa7, 0x5f, 0xeb, 0x41, 0xa1, 0xcd, 0xef,
7307 0x5c, 0x50, 0x79, 0x2a, 0x56, 0x56, 0x71, 0x8c,
7308 0xac, 0xc0, 0x79, 0x50, 0x69, 0xca, 0x59, 0x32,
7309 0x65, 0xf2, 0x54, 0xe4, 0x52, 0x38, 0x76, 0xd1,
7310 0x5e, 0xde, 0x26, 0x9e, 0xfb, 0x75, 0x2e, 0x11,
7311 0xb5, 0x10, 0xf4, 0x17, 0x73, 0xf5, 0x89, 0xc7,
7312 0x4f, 0x43, 0x5c, 0x8e, 0x7c, 0xb9, 0x05, 0x52,
7313 0x24, 0x40, 0x99, 0xfe, 0x9b, 0x85, 0x0b, 0x6c,
7314 0x22, 0x3e, 0x8b, 0xae, 0x86, 0xa1, 0xd2, 0x79,
7315 0x05, 0x68, 0x6b, 0xab, 0xe3, 0x41, 0x49, 0xed,
7316 0x15, 0xa1, 0x8d, 0x40, 0x2d, 0x61, 0xdf, 0x1a,
7317 0x59, 0xc9, 0x26, 0x8b, 0xef, 0x30, 0x4c, 0x88,
7318 0x4b, 0x10, 0xf8, 0x8d, 0xa6, 0x92, 0x9f, 0x4b,
7319 0xf3, 0xc4, 0x53, 0x0b, 0x89, 0x5d, 0x28, 0x92,
7320 0xcf, 0x78, 0xb2, 0xc0, 0x5d, 0xed, 0x7e, 0xfc,
7321 0xc0, 0x12, 0x23, 0x5f, 0x5a, 0x78, 0x86, 0x43,
7322 0x6e, 0x27, 0xf7, 0x5a, 0xa7, 0x6a, 0xed, 0x19,
7323 0x04, 0xf0, 0xb3, 0x12, 0xd1, 0xbd, 0x0e, 0x89,
7324 0x6e, 0xbc, 0x96, 0xa8, 0xd8, 0x49, 0x39, 0x9f,
7325 0x7e, 0x67, 0xf0, 0x2e, 0x3e, 0x01, 0xa9, 0xba,
7326 0xec, 0x8b, 0x62, 0x8e, 0xcb, 0x4a, 0x70, 0x43,
7327 0xc7, 0xc2, 0xc4, 0xca, 0x82, 0x03, 0x73, 0xe9,
7328 0x11, 0xdf, 0xcf, 0x54, 0xea, 0xc9, 0xb0, 0x95,
7329 0x51, 0xc0, 0x13, 0x3d, 0x92, 0x05, 0xfa, 0xf4,
7330 0xa9, 0x34, 0xc8, 0xce, 0x6c, 0x3d, 0x54, 0xcc,
7331 0xc4, 0xaf, 0xf1, 0xdc, 0x11, 0x44, 0x26, 0xa2,
7332 0xaf, 0xf1, 0x85, 0x75, 0x7d, 0x03, 0x61, 0x68,
7333 0x4e, 0x78, 0xc6, 0x92, 0x7d, 0x86, 0x7d, 0x77,
7334 0xdc, 0x71, 0x72, 0xdb, 0xc6, 0xae, 0xa1, 0xcb,
7335 0x70, 0x9a, 0x0b, 0x19, 0xbe, 0x4a, 0x6c, 0x2a,
7336 0xe2, 0xba, 0x6c, 0x64, 0x9a, 0x13, 0x28, 0xdf,
7337 0x85, 0x75, 0xe6, 0x43, 0xf6, 0x87, 0x08, 0x68,
7338 0x6e, 0xba, 0x6e, 0x79, 0x9f, 0x04, 0xbc, 0x23,
7339 0x50, 0xf6, 0x33, 0x5c, 0x1f, 0x24, 0x25, 0xbe,
7340 0x33, 0x47, 0x80, 0x45, 0x56, 0xa3, 0xa7, 0xd7,
7341 0x7a, 0xb1, 0x34, 0x0b, 0x90, 0x3c, 0x9c, 0xad,
7342 0x44, 0x5f, 0x9e, 0x0e, 0x9d, 0xd4, 0xbd, 0x93,
7343 0x5e, 0xfa, 0x3c, 0xe0, 0xb0, 0xd9, 0xed, 0xf3,
7344 0xd6, 0x2e, 0xff, 0x24, 0xd8, 0x71, 0x6c, 0xed,
7345 0xaf, 0x55, 0xeb, 0x22, 0xac, 0x93, 0x68, 0x32,
7346 0x05, 0x5b, 0x47, 0xdd, 0xc6, 0x4a, 0xcb, 0xc7,
7347 0x10, 0xe1, 0x3c, 0x92, 0x1a, 0xf3, 0x23, 0x78,
7348 0x2b, 0xa1, 0xd2, 0x80, 0xf4, 0x12, 0xb1, 0x20,
7349 0x8f, 0xff, 0x26, 0x35, 0xdd, 0xfb, 0xc7, 0x4e,
7350 0x78, 0xf1, 0x2d, 0x50, 0x12, 0x77, 0xa8, 0x60,
7351 0x7c, 0x0f, 0xf5, 0x16, 0x2f, 0x63, 0x70, 0x2a,
7352 0xc0, 0x96, 0x80, 0x4e, 0x0a, 0xb4, 0x93, 0x35,
7353 0x5d, 0x1d, 0x3f, 0x56, 0xf7, 0x2f, 0xbb, 0x90,
7354 0x11, 0x16, 0x8f, 0xa2, 0xec, 0x47, 0xbe, 0xac,
7355 0x56, 0x01, 0x26, 0x56, 0xb1, 0x8c, 0xb2, 0x10,
7356 0xf9, 0x1a, 0xca, 0xf5, 0xd1, 0xb7, 0x39, 0x20,
7357 0x63, 0xf1, 0x69, 0x20, 0x4f, 0x13, 0x12, 0x1f,
7358 0x5b, 0x65, 0xfc, 0x98, 0xf7, 0xc4, 0x7a, 0xbe,
7359 0xf7, 0x26, 0x4d, 0x2b, 0x84, 0x7b, 0x42, 0xad,
7360 0xd8, 0x7a, 0x0a, 0xb4, 0xd8, 0x74, 0xbf, 0xc1,
7361 0xf0, 0x6e, 0xb4, 0x29, 0xa3, 0xbb, 0xca, 0x46,
7362 0x67, 0x70, 0x6a, 0x2d, 0xce, 0x0e, 0xa2, 0x8a,
7363 0xa9, 0x87, 0xbf, 0x05, 0xc4, 0xc1, 0x04, 0xa3,
7364 0xab, 0xd4, 0x45, 0x43, 0x8c, 0xb6, 0x02, 0xb0,
7365 0x41, 0xc8, 0xfc, 0x44, 0x3d, 0x59, 0xaa, 0x2e,
7366 0x44, 0x21, 0x2a, 0x8d, 0x88, 0x9d, 0x57, 0xf4,
7367 0xa0, 0x02, 0x77, 0xb8, 0xa6, 0xa0, 0xe6, 0x75,
7368 0x5c, 0x82, 0x65, 0x3e, 0x03, 0x5c, 0x29, 0x8f,
7369 0x38, 0x55, 0xab, 0x33, 0x26, 0xef, 0x9f, 0x43,
7370 0x52, 0xfd, 0x68, 0xaf, 0x36, 0xb4, 0xbb, 0x9a,
7371 0x58, 0x09, 0x09, 0x1b, 0xc3, 0x65, 0x46, 0x46,
7372 0x1d, 0xa7, 0x94, 0x18, 0x23, 0x50, 0x2c, 0xca,
7373 0x2c, 0x55, 0x19, 0x97, 0x01, 0x9d, 0x93, 0x3b,
7374 0x63, 0x86, 0xf2, 0x03, 0x67, 0x45, 0xd2, 0x72,
7375 0x28, 0x52, 0x6c, 0xf4, 0xe3, 0x1c, 0xb5, 0x11,
7376 0x13, 0xf1, 0xeb, 0x21, 0xc7, 0xd9, 0x56, 0x82,
7377 0x2b, 0x82, 0x39, 0xbd, 0x69, 0x54, 0xed, 0x62,
7378 0xc3, 0xe2, 0xde, 0x73, 0xd4, 0x6a, 0x12, 0xae,
7379 0x13, 0x21, 0x7f, 0x4b, 0x5b, 0xfc, 0xbf, 0xe8,
7380 0x2b, 0xbe, 0x56, 0xba, 0x68, 0x8b, 0x9a, 0xb1,
7381 0x6e, 0xfa, 0xbf, 0x7e, 0x5a, 0x4b, 0xf1, 0xac,
7382 0x98, 0x65, 0x85, 0xd1, 0x93, 0x53, 0xd3, 0x7b,
7383 0x09, 0xdd, 0x4b, 0x10, 0x6d, 0x84, 0xb0, 0x13,
7384 0x65, 0xbd, 0xcf, 0x52, 0x09, 0xc4, 0x85, 0xe2,
7385 0x84, 0x74, 0x15, 0x65, 0xb7, 0xf7, 0x51, 0xaf,
7386 0x55, 0xad, 0xa4, 0xd1, 0x22, 0x54, 0x70, 0x94,
7387 0xa0, 0x1c, 0x90, 0x41, 0xfd, 0x99, 0xd7, 0x5a,
7388 0x31, 0xef, 0xaa, 0x25, 0xd0, 0x7f, 0x4f, 0xea,
7389 0x1d, 0x55, 0x42, 0xe5, 0x49, 0xb0, 0xd0, 0x46,
7390 0x62, 0x36, 0x43, 0xb2, 0x82, 0x15, 0x75, 0x50,
7391 0xa4, 0x72, 0xeb, 0x54, 0x27, 0x1f, 0x8a, 0xe4,
7392 0x7d, 0xe9, 0x66, 0xc5, 0xf1, 0x53, 0xa4, 0xd1,
7393 0x0c, 0xeb, 0xb8, 0xf8, 0xbc, 0xd4, 0xe2, 0xe7,
7394 0xe1, 0xf8, 0x4b, 0xcb, 0xa9, 0xa1, 0xaf, 0x15,
7395 0x83, 0xcb, 0x72, 0xd0, 0x33, 0x79, 0x00, 0x2d,
7396 0x9f, 0xd7, 0xf1, 0x2e, 0x1e, 0x10, 0xe4, 0x45,
7397 0xc0, 0x75, 0x3a, 0x39, 0xea, 0x68, 0xf7, 0x5d,
7398 0x1b, 0x73, 0x8f, 0xe9, 0x8e, 0x0f, 0x72, 0x47,
7399 0xae, 0x35, 0x0a, 0x31, 0x7a, 0x14, 0x4d, 0x4a,
7400 0x6f, 0x47, 0xf7, 0x7e, 0x91, 0x6e, 0x74, 0x8b,
7401 0x26, 0x47, 0xf9, 0xc3, 0xf9, 0xde, 0x70, 0xf5,
7402 0x61, 0xab, 0xa9, 0x27, 0x9f, 0x82, 0xe4, 0x9c,
7403 0x89, 0x91, 0x3f, 0x2e, 0x6a, 0xfd, 0xb5, 0x49,
7404 0xe9, 0xfd, 0x59, 0x14, 0x36, 0x49, 0x40, 0x6d,
7405 0x32, 0xd8, 0x85, 0x42, 0xf3, 0xa5, 0xdf, 0x0c,
7406 0xa8, 0x27, 0xd7, 0x54, 0xe2, 0x63, 0x2f, 0xf2,
7407 0x7e, 0x8b, 0x8b, 0xe7, 0xf1, 0x9a, 0x95, 0x35,
7408 0x43, 0xdc, 0x3a, 0xe4, 0xb6, 0xf4, 0xd0, 0xdf,
7409 0x9c, 0xcb, 0x94, 0xf3, 0x21, 0xa0, 0x77, 0x50,
7410 0xe2, 0xc6, 0xc4, 0xc6, 0x5f, 0x09, 0x64, 0x5b,
7411 0x92, 0x90, 0xd8, 0xe1, 0xd1, 0xed, 0x4b, 0x42,
7412 0xd7, 0x37, 0xaf, 0x65, 0x3d, 0x11, 0x39, 0xb6,
7413 0x24, 0x8a, 0x60, 0xae, 0xd6, 0x1e, 0xbf, 0x0e,
7414 0x0d, 0xd7, 0xdc, 0x96, 0x0e, 0x65, 0x75, 0x4e,
7415 0x29, 0x06, 0x9d, 0xa4, 0x51, 0x3a, 0x10, 0x63,
7416 0x8f, 0x17, 0x07, 0xd5, 0x8e, 0x3c, 0xf4, 0x28,
7417 0x00, 0x5a, 0x5b, 0x05, 0x19, 0xd8, 0xc0, 0x6c,
7418 0xe5, 0x15, 0xe4, 0x9c, 0x9d, 0x71, 0x9d, 0x5e,
7419 0x94, 0x29, 0x1a, 0xa7, 0x80, 0xfa, 0x0e, 0x33,
7420 0x03, 0xdd, 0xb7, 0x3e, 0x9a, 0xa9, 0x26, 0x18,
7421 0x37, 0xa9, 0x64, 0x08, 0x4d, 0x94, 0x5a, 0x88,
7422 0xca, 0x35, 0xce, 0x81, 0x02, 0xe3, 0x1f, 0x1b,
7423 0x89, 0x1a, 0x77, 0x85, 0xe3, 0x41, 0x6d, 0x32,
7424 0x42, 0x19, 0x23, 0x7d, 0xc8, 0x73, 0xee, 0x25,
7425 0x85, 0x0d, 0xf8, 0x31, 0x25, 0x79, 0x1b, 0x6f,
7426 0x79, 0x25, 0xd2, 0xd8, 0xd4, 0x23, 0xfd, 0xf7,
7427 0x82, 0x36, 0x6a, 0x0c, 0x46, 0x22, 0x15, 0xe9,
7428 0xff, 0x72, 0x41, 0x91, 0x91, 0x7d, 0x3a, 0xb7,
7429 0xdd, 0x65, 0x99, 0x70, 0xf6, 0x8d, 0x84, 0xf8,
7430 0x67, 0x15, 0x20, 0x11, 0xd6, 0xb2, 0x55, 0x7b,
7431 0xdb, 0x87, 0xee, 0xef, 0x55, 0x89, 0x2a, 0x59,
7432 0x2b, 0x07, 0x8f, 0x43, 0x8a, 0x59, 0x3c, 0x01,
7433 0x8b, 0x65, 0x54, 0xa1, 0x66, 0xd5, 0x38, 0xbd,
7434 0xc6, 0x30, 0xa9, 0xcc, 0x49, 0xb6, 0xa8, 0x1b,
7435 0xb8, 0xc0, 0x0e, 0xe3, 0x45, 0x28, 0xe2, 0xff,
7436 0x41, 0x9f, 0x7e, 0x7c, 0xd1, 0xae, 0x9e, 0x25,
7437 0x3f, 0x4c, 0x7c, 0x7c, 0xf4, 0xa8, 0x26, 0x4d,
7438 0x5c, 0xfd, 0x4b, 0x27, 0x18, 0xf9, 0x61, 0x76,
7439 0x48, 0xba, 0x0c, 0x6b, 0xa9, 0x4d, 0xfc, 0xf5,
7440 0x3b, 0x35, 0x7e, 0x2f, 0x4a, 0xa9, 0xc2, 0x9a,
7441 0xae, 0xab, 0x86, 0x09, 0x89, 0xc9, 0xc2, 0x40,
7442 0x39, 0x2c, 0x81, 0xb3, 0xb8, 0x17, 0x67, 0xc2,
7443 0x0d, 0x32, 0x4a, 0x3a, 0x67, 0x81, 0xd7, 0x1a,
7444 0x34, 0x52, 0xc5, 0xdb, 0x0a, 0xf5, 0x63, 0x39,
7445 0xea, 0x1f, 0xe1, 0x7c, 0xa1, 0x9e, 0xc1, 0x35,
7446 0xe3, 0xb1, 0x18, 0x45, 0x67, 0xf9, 0x22, 0x38,
7447 0x95, 0xd9, 0x34, 0x34, 0x86, 0xc6, 0x41, 0x94,
7448 0x15, 0xf9, 0x5b, 0x41, 0xa6, 0x87, 0x8b, 0xf8,
7449 0xd5, 0xe1, 0x1b, 0xe2, 0x5b, 0xf3, 0x86, 0x10,
7450 0xff, 0xe6, 0xae, 0x69, 0x76, 0xbc, 0x0d, 0xb4,
7451 0x09, 0x90, 0x0c, 0xa2, 0x65, 0x0c, 0xad, 0x74,
7452 0xf5, 0xd7, 0xff, 0xda, 0xc1, 0xce, 0x85, 0xbe,
7453 0x00, 0xa7, 0xff, 0x4d, 0x2f, 0x65, 0xd3, 0x8c,
7454 0x86, 0x2d, 0x05, 0xe8, 0xed, 0x3e, 0x6b, 0x8b,
7455 0x0f, 0x3d, 0x83, 0x8c, 0xf1, 0x1d, 0x5b, 0x96,
7456 0x2e, 0xb1, 0x9c, 0xc2, 0x98, 0xe1, 0x70, 0xb9,
7457 0xba, 0x5c, 0x8a, 0x43, 0xd6, 0x34, 0xa7, 0x2d,
7458 0xc9, 0x92, 0xae, 0xf2, 0xa5, 0x7b, 0x05, 0x49,
7459 0xa7, 0x33, 0x34, 0x86, 0xca, 0xe4, 0x96, 0x23,
7460 0x76, 0x5b, 0xf2, 0xc6, 0xf1, 0x51, 0x28, 0x42,
7461 0x7b, 0xcc, 0x76, 0x8f, 0xfa, 0xa2, 0xad, 0x31,
7462 0xd4, 0xd6, 0x7a, 0x6d, 0x25, 0x25, 0x54, 0xe4,
7463 0x3f, 0x50, 0x59, 0xe1, 0x5c, 0x05, 0xb7, 0x27,
7464 0x48, 0xbf, 0x07, 0xec, 0x1b, 0x13, 0xbe, 0x2b,
7465 0xa1, 0x57, 0x2b, 0xd5, 0xab, 0xd7, 0xd0, 0x4c,
7466 0x1e, 0xcb, 0x71, 0x9b, 0xc5, 0x90, 0x85, 0xd3,
7467 0xde, 0x59, 0xec, 0x71, 0xeb, 0x89, 0xbb, 0xd0,
7468 0x09, 0x50, 0xe1, 0x16, 0x3f, 0xfd, 0x1c, 0x34,
7469 0xc3, 0x1c, 0xa1, 0x10, 0x77, 0x53, 0x98, 0xef,
7470 0xf2, 0xfd, 0xa5, 0x01, 0x59, 0xc2, 0x9b, 0x26,
7471 0xc7, 0x42, 0xd9, 0x49, 0xda, 0x58, 0x2b, 0x6e,
7472 0x9f, 0x53, 0x19, 0x76, 0x7e, 0xd9, 0xc9, 0x0e,
7473 0x68, 0xc8, 0x7f, 0x51, 0x22, 0x42, 0xef, 0x49,
7474 0xa4, 0x55, 0xb6, 0x36, 0xac, 0x09, 0xc7, 0x31,
7475 0x88, 0x15, 0x4b, 0x2e, 0x8f, 0x3a, 0x08, 0xf7,
7476 0xd8, 0xf7, 0xa8, 0xc5, 0xa9, 0x33, 0xa6, 0x45,
7477 0xe4, 0xc4, 0x94, 0x76, 0xf3, 0x0d, 0x8f, 0x7e,
7478 0xc8, 0xf6, 0xbc, 0x23, 0x0a, 0xb6, 0x4c, 0xd3,
7479 0x6a, 0xcd, 0x36, 0xc2, 0x90, 0x5c, 0x5c, 0x3c,
7480 0x65, 0x7b, 0xc2, 0xd6, 0xcc, 0xe6, 0x0d, 0x87,
7481 0x73, 0x2e, 0x71, 0x79, 0x16, 0x06, 0x63, 0x28,
7482 0x09, 0x15, 0xd8, 0x89, 0x38, 0x38, 0x3d, 0xb5,
7483 0x42, 0x1c, 0x08, 0x24, 0xf7, 0x2a, 0xd2, 0x9d,
7484 0xc8, 0xca, 0xef, 0xf9, 0x27, 0xd8, 0x07, 0x86,
7485 0xf7, 0x43, 0x0b, 0x55, 0x15, 0x3f, 0x9f, 0x83,
7486 0xef, 0xdc, 0x49, 0x9d, 0x2a, 0xc1, 0x54, 0x62,
7487 0xbd, 0x9b, 0x66, 0x55, 0x9f, 0xb7, 0x12, 0xf3,
7488 0x1b, 0x4d, 0x9d, 0x2a, 0x5c, 0xed, 0x87, 0x75,
7489 0x87, 0x26, 0xec, 0x61, 0x2c, 0xb4, 0x0f, 0x89,
7490 0xb0, 0xfb, 0x2e, 0x68, 0x5d, 0x15, 0xc7, 0x8d,
7491 0x2e, 0xc0, 0xd9, 0xec, 0xaf, 0x4f, 0xd2, 0x25,
7492 0x29, 0xe8, 0xd2, 0x26, 0x2b, 0x67, 0xe9, 0xfc,
7493 0x2b, 0xa8, 0x67, 0x96, 0x12, 0x1f, 0x5b, 0x96,
7494 0xc6, 0x14, 0x53, 0xaf, 0x44, 0xea, 0xd6, 0xe2,
7495 0x94, 0x98, 0xe4, 0x12, 0x93, 0x4c, 0x92, 0xe0,
7496 0x18, 0xa5, 0x8d, 0x2d, 0xe4, 0x71, 0x3c, 0x47,
7497 0x4c, 0xf7, 0xe6, 0x47, 0x9e, 0xc0, 0x68, 0xdf,
7498 0xd4, 0xf5, 0x5a, 0x74, 0xb1, 0x2b, 0x29, 0x03,
7499 0x19, 0x07, 0xaf, 0x90, 0x62, 0x5c, 0x68, 0x98,
7500 0x48, 0x16, 0x11, 0x02, 0x9d, 0xee, 0xb4, 0x9b,
7501 0xe5, 0x42, 0x7f, 0x08, 0xfd, 0x16, 0x32, 0x0b,
7502 0xd0, 0xb3, 0xfa, 0x2b, 0xb7, 0x99, 0xf9, 0x29,
7503 0xcd, 0x20, 0x45, 0x9f, 0xb3, 0x1a, 0x5d, 0xa2,
7504 0xaf, 0x4d, 0xe0, 0xbd, 0x42, 0x0d, 0xbc, 0x74,
7505 0x99, 0x9c, 0x8e, 0x53, 0x1a, 0xb4, 0x3e, 0xbd,
7506 0xa2, 0x9a, 0x2d, 0xf7, 0xf8, 0x39, 0x0f, 0x67,
7507 0x63, 0xfc, 0x6b, 0xc0, 0xaf, 0xb3, 0x4b, 0x4f,
7508 0x55, 0xc4, 0xcf, 0xa7, 0xc8, 0x04, 0x11, 0x3e,
7509 0x14, 0x32, 0xbb, 0x1b, 0x38, 0x77, 0xd6, 0x7f,
7510 0x54, 0x4c, 0xdf, 0x75, 0xf3, 0x07, 0x2d, 0x33,
7511 0x9b, 0xa8, 0x20, 0xe1, 0x7b, 0x12, 0xb5, 0xf3,
7512 0xef, 0x2f, 0xce, 0x72, 0xe5, 0x24, 0x60, 0xc1,
7513 0x30, 0xe2, 0xab, 0xa1, 0x8e, 0x11, 0x09, 0xa8,
7514 0x21, 0x33, 0x44, 0xfe, 0x7f, 0x35, 0x32, 0x93,
7515 0x39, 0xa7, 0xad, 0x8b, 0x79, 0x06, 0xb2, 0xcb,
7516 0x4e, 0xa9, 0x5f, 0xc7, 0xba, 0x74, 0x29, 0xec,
7517 0x93, 0xa0, 0x4e, 0x54, 0x93, 0xc0, 0xbc, 0x55,
7518 0x64, 0xf0, 0x48, 0xe5, 0x57, 0x99, 0xee, 0x75,
7519 0xd6, 0x79, 0x0f, 0x66, 0xb7, 0xc6, 0x57, 0x76,
7520 0xf7, 0xb7, 0xf3, 0x9c, 0xc5, 0x60, 0xe8, 0x7f,
7521 0x83, 0x76, 0xd6, 0x0e, 0xaa, 0xe6, 0x90, 0x39,
7522 0x1d, 0xa6, 0x32, 0x6a, 0x34, 0xe3, 0x55, 0xf8,
7523 0x58, 0xa0, 0x58, 0x7d, 0x33, 0xe0, 0x22, 0x39,
7524 0x44, 0x64, 0x87, 0x86, 0x5a, 0x2f, 0xa7, 0x7e,
7525 0x0f, 0x38, 0xea, 0xb0, 0x30, 0xcc, 0x61, 0xa5,
7526 0x6a, 0x32, 0xae, 0x1e, 0xf7, 0xe9, 0xd0, 0xa9,
7527 0x0c, 0x32, 0x4b, 0xb5, 0x49, 0x28, 0xab, 0x85,
7528 0x2f, 0x8e, 0x01, 0x36, 0x38, 0x52, 0xd0, 0xba,
7529 0xd6, 0x02, 0x78, 0xf8, 0x0e, 0x3e, 0x9c, 0x8b,
7530 0x6b, 0x45, 0x99, 0x3f, 0x5c, 0xfe, 0x58, 0xf1,
7531 0x5c, 0x94, 0x04, 0xe1, 0xf5, 0x18, 0x6d, 0x51,
7532 0xb2, 0x5d, 0x18, 0x20, 0xb6, 0xc2, 0x9a, 0x42,
7533 0x1d, 0xb3, 0xab, 0x3c, 0xb6, 0x3a, 0x13, 0x03,
7534 0xb2, 0x46, 0x82, 0x4f, 0xfc, 0x64, 0xbc, 0x4f,
7535 0xca, 0xfa, 0x9c, 0xc0, 0xd5, 0xa7, 0xbd, 0x11,
7536 0xb7, 0xe4, 0x5a, 0xf6, 0x6f, 0x4d, 0x4d, 0x54,
7537 0xea, 0xa4, 0x98, 0x66, 0xd4, 0x22, 0x3b, 0xd3,
7538 0x8f, 0x34, 0x47, 0xd9, 0x7c, 0xf4, 0x72, 0x3b,
7539 0x4d, 0x02, 0x77, 0xf6, 0xd6, 0xdd, 0x08, 0x0a,
7540 0x81, 0xe1, 0x86, 0x89, 0x3e, 0x56, 0x10, 0x3c,
7541 0xba, 0xd7, 0x81, 0x8c, 0x08, 0xbc, 0x8b, 0xe2,
7542 0x53, 0xec, 0xa7, 0x89, 0xee, 0xc8, 0x56, 0xb5,
7543 0x36, 0x2c, 0xb2, 0x03, 0xba, 0x99, 0xdd, 0x7c,
7544 0x48, 0xa0, 0xb0, 0xbc, 0x91, 0x33, 0xe9, 0xa8,
7545 0xcb, 0xcd, 0xcf, 0x59, 0x5f, 0x1f, 0x15, 0xe2,
7546 0x56, 0xf5, 0x4e, 0x01, 0x35, 0x27, 0x45, 0x77,
7547 0x47, 0xc8, 0xbc, 0xcb, 0x7e, 0x39, 0xc1, 0x97,
7548 0x28, 0xd3, 0x84, 0xfc, 0x2c, 0x3e, 0xc8, 0xad,
7549 0x9c, 0xf8, 0x8a, 0x61, 0x9c, 0x28, 0xaa, 0xc5,
7550 0x99, 0x20, 0x43, 0x85, 0x9d, 0xa5, 0xe2, 0x8b,
7551 0xb8, 0xae, 0xeb, 0xd0, 0x32, 0x0d, 0x52, 0x78,
7552 0x09, 0x56, 0x3f, 0xc7, 0xd8, 0x7e, 0x26, 0xfc,
7553 0x37, 0xfb, 0x6f, 0x04, 0xfc, 0xfa, 0x92, 0x10,
7554 0xac, 0xf8, 0x3e, 0x21, 0xdc, 0x8c, 0x21, 0x16,
7555 0x7d, 0x67, 0x6e, 0xf6, 0xcd, 0xda, 0xb6, 0x98,
7556 0x23, 0xab, 0x23, 0x3c, 0xb2, 0x10, 0xa0, 0x53,
7557 0x5a, 0x56, 0x9f, 0xc5, 0xd0, 0xff, 0xbb, 0xe4,
7558 0x98, 0x3c, 0x69, 0x1e, 0xdb, 0x38, 0x8f, 0x7e,
7559 0x0f, 0xd2, 0x98, 0x88, 0x81, 0x8b, 0x45, 0x67,
7560 0xea, 0x33, 0xf1, 0xeb, 0xe9, 0x97, 0x55, 0x2e,
7561 0xd9, 0xaa, 0xeb, 0x5a, 0xec, 0xda, 0xe1, 0x68,
7562 0xa8, 0x9d, 0x3c, 0x84, 0x7c, 0x05, 0x3d, 0x62,
7563 0x87, 0x8f, 0x03, 0x21, 0x28, 0x95, 0x0c, 0x89,
7564 0x25, 0x22, 0x4a, 0xb0, 0x93, 0xa9, 0x50, 0xa2,
7565 0x2f, 0x57, 0x6e, 0x18, 0x42, 0x19, 0x54, 0x0c,
7566 0x55, 0x67, 0xc6, 0x11, 0x49, 0xf4, 0x5c, 0xd2,
7567 0xe9, 0x3d, 0xdd, 0x8b, 0x48, 0x71, 0x21, 0x00,
7568 0xc3, 0x9a, 0x6c, 0x85, 0x74, 0x28, 0x83, 0x4a,
7569 0x1b, 0x31, 0x05, 0xe1, 0x06, 0x92, 0xe7, 0xda,
7570 0x85, 0x73, 0x78, 0x45, 0x20, 0x7f, 0xae, 0x13,
7571 0x7c, 0x33, 0x06, 0x22, 0xf4, 0x83, 0xf9, 0x35,
7572 0x3f, 0x6c, 0x71, 0xa8, 0x4e, 0x48, 0xbe, 0x9b,
7573 0xce, 0x8a, 0xba, 0xda, 0xbe, 0x28, 0x08, 0xf7,
7574 0xe2, 0x14, 0x8c, 0x71, 0xea, 0x72, 0xf9, 0x33,
7575 0xf2, 0x88, 0x3f, 0xd7, 0xbb, 0x69, 0x6c, 0x29,
7576 0x19, 0xdc, 0x84, 0xce, 0x1f, 0x12, 0x4f, 0xc8,
7577 0xaf, 0xa5, 0x04, 0xba, 0x5a, 0xab, 0xb0, 0xd9,
7578 0x14, 0x1f, 0x6c, 0x68, 0x98, 0x39, 0x89, 0x7a,
7579 0xd9, 0xd8, 0x2f, 0xdf, 0xa8, 0x47, 0x4a, 0x25,
7580 0xe2, 0xfb, 0x33, 0xf4, 0x59, 0x78, 0xe1, 0x68,
7581 0x85, 0xcf, 0xfe, 0x59, 0x20, 0xd4, 0x05, 0x1d,
7582 0x80, 0x99, 0xae, 0xbc, 0xca, 0xae, 0x0f, 0x2f,
7583 0x65, 0x43, 0x34, 0x8e, 0x7e, 0xac, 0xd3, 0x93,
7584 0x2f, 0xac, 0x6d, 0x14, 0x3d, 0x02, 0x07, 0x70,
7585 0x9d, 0xa4, 0xf3, 0x1b, 0x5c, 0x36, 0xfc, 0x01,
7586 0x73, 0x34, 0x85, 0x0c, 0x6c, 0xd6, 0xf1, 0xbd,
7587 0x3f, 0xdf, 0xee, 0xf5, 0xd9, 0xba, 0x56, 0xef,
7588 0xf4, 0x9b, 0x6b, 0xee, 0x9f, 0x5a, 0x78, 0x6d,
7589 0x32, 0x19, 0xf4, 0xf7, 0xf8, 0x4c, 0x69, 0x0b,
7590 0x4b, 0xbc, 0xbb, 0xb7, 0xf2, 0x85, 0xaf, 0x70,
7591 0x75, 0x24, 0x6c, 0x54, 0xa7, 0x0e, 0x4d, 0x1d,
7592 0x01, 0xbf, 0x08, 0xac, 0xcf, 0x7f, 0x2c, 0xe3,
7593 0x14, 0x89, 0x5e, 0x70, 0x5a, 0x99, 0x92, 0xcd,
7594 0x01, 0x84, 0xc8, 0xd2, 0xab, 0xe5, 0x4f, 0x58,
7595 0xe7, 0x0f, 0x2f, 0x0e, 0xff, 0x68, 0xea, 0xfd,
7596 0x15, 0xb3, 0x17, 0xe6, 0xb0, 0xe7, 0x85, 0xd8,
7597 0x23, 0x2e, 0x05, 0xc7, 0xc9, 0xc4, 0x46, 0x1f,
7598 0xe1, 0x9e, 0x49, 0x20, 0x23, 0x24, 0x4d, 0x7e,
7599 0x29, 0x65, 0xff, 0xf4, 0xb6, 0xfd, 0x1a, 0x85,
7600 0xc4, 0x16, 0xec, 0xfc, 0xea, 0x7b, 0xd6, 0x2c,
7601 0x43, 0xf8, 0xb7, 0xbf, 0x79, 0xc0, 0x85, 0xcd,
7602 0xef, 0xe1, 0x98, 0xd3, 0xa5, 0xf7, 0x90, 0x8c,
7603 0xe9, 0x7f, 0x80, 0x6b, 0xd2, 0xac, 0x4c, 0x30,
7604 0xa7, 0xc6, 0x61, 0x6c, 0xd2, 0xf9, 0x2c, 0xff,
7605 0x30, 0xbc, 0x22, 0x81, 0x7d, 0x93, 0x12, 0xe4,
7606 0x0a, 0xcd, 0xaf, 0xdd, 0xe8, 0xab, 0x0a, 0x1e,
7607 0x13, 0xa4, 0x27, 0xc3, 0x5f, 0xf7, 0x4b, 0xbb,
7608 0x37, 0x09, 0x4b, 0x91, 0x6f, 0x92, 0x4f, 0xaf,
7609 0x52, 0xee, 0xdf, 0xef, 0x09, 0x6f, 0xf7, 0x5c,
7610 0x6e, 0x12, 0x17, 0x72, 0x63, 0x57, 0xc7, 0xba,
7611 0x3b, 0x6b, 0x38, 0x32, 0x73, 0x1b, 0x9c, 0x80,
7612 0xc1, 0x7a, 0xc6, 0xcf, 0xcd, 0x35, 0xc0, 0x6b,
7613 0x31, 0x1a, 0x6b, 0xe9, 0xd8, 0x2c, 0x29, 0x3f,
7614 0x96, 0xfb, 0xb6, 0xcd, 0x13, 0x91, 0x3b, 0xc2,
7615 0xd2, 0xa3, 0x31, 0x8d, 0xa4, 0xcd, 0x57, 0xcd,
7616 0x13, 0x3d, 0x64, 0xfd, 0x06, 0xce, 0xe6, 0xdc,
7617 0x0c, 0x24, 0x43, 0x31, 0x40, 0x57, 0xf1, 0x72,
7618 0x17, 0xe3, 0x3a, 0x63, 0x6d, 0x35, 0xcf, 0x5d,
7619 0x97, 0x40, 0x59, 0xdd, 0xf7, 0x3c, 0x02, 0xf7,
7620 0x1c, 0x7e, 0x05, 0xbb, 0xa9, 0x0d, 0x01, 0xb1,
7621 0x8e, 0xc0, 0x30, 0xa9, 0x53, 0x24, 0xc9, 0x89,
7622 0x84, 0x6d, 0xaa, 0xd0, 0xcd, 0x91, 0xc2, 0x4d,
7623 0x91, 0xb0, 0x89, 0xe2, 0xbf, 0x83, 0x44, 0xaa,
7624 0x28, 0x72, 0x23, 0xa0, 0xc2, 0xad, 0xad, 0x1c,
7625 0xfc, 0x3f, 0x09, 0x7a, 0x0b, 0xdc, 0xc5, 0x1b,
7626 0x87, 0x13, 0xc6, 0x5b, 0x59, 0x8d, 0xf2, 0xc8,
7627 0xaf, 0xdf, 0x11, 0x95,
7628 },
7629 .rlen = 4100,
7630 },
7631};
7632
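
The vectors above follow the tcrypt convention: an .input buffer of .ilen bytes is run through the cipher and the output is compared byte for byte against .result/.rlen. The stand-alone C sketch below shows that check in miniature; struct cipher_tv, the dummy encrypt() and check() are illustrative names only, not the kernel's tcrypt harness, and the dummy cipher just copies its input so the example stays self-contained and compiles as-is.

#include <stdio.h>
#include <string.h>

/* Simplified stand-in for a tcrypt-style cipher test vector.  The field
 * names mirror the .ilen/.input/.result/.rlen convention above, but the
 * structure and the check() helper are illustrative only. */
struct cipher_tv {
        const unsigned char *input;
        const unsigned char *result;
        unsigned int ilen;
        unsigned int rlen;
};

/* encrypt() would be the cipher under test; here it is a dummy that
 * simply copies the input so the example needs no crypto library. */
static void encrypt(unsigned char *dst, const unsigned char *src,
                    unsigned int len)
{
        memcpy(dst, src, len);
}

static int check(const struct cipher_tv *tv, unsigned char *scratch)
{
        encrypt(scratch, tv->input, tv->ilen);
        return memcmp(scratch, tv->result, tv->rlen) ? -1 : 0;
}

int main(void)
{
        static const unsigned char in[4] = { 0x00, 0x21, 0x42, 0x63 };
        static const struct cipher_tv tv = {
                .input = in, .result = in, .ilen = 4, .rlen = 4,
        };
        unsigned char buf[4];

        printf("vector %s\n", check(&tv, buf) ? "FAILED" : "ok");
        return 0;
}
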
4320/* 7633/*
4321 * Compression stuff. 7634 * Compression stuff.
4322 */ 7635 */
@@ -4408,6 +7721,88 @@ static struct comp_testvec deflate_decomp_tv_template[] = {
4408}; 7721};
4409 7722
4410/* 7723/*
7724 * LZO test vectors (null-terminated strings).
7725 */
7726#define LZO_COMP_TEST_VECTORS 2
7727#define LZO_DECOMP_TEST_VECTORS 2
7728
7729static struct comp_testvec lzo_comp_tv_template[] = {
7730 {
7731 .inlen = 70,
7732 .outlen = 46,
7733 .input = "Join us now and share the software "
7734 "Join us now and share the software ",
7735 .output = { 0x00, 0x0d, 0x4a, 0x6f, 0x69, 0x6e, 0x20, 0x75,
7736 0x73, 0x20, 0x6e, 0x6f, 0x77, 0x20, 0x61, 0x6e,
7737 0x64, 0x20, 0x73, 0x68, 0x61, 0x72, 0x65, 0x20,
7738 0x74, 0x68, 0x65, 0x20, 0x73, 0x6f, 0x66, 0x74,
7739 0x77, 0x70, 0x01, 0x01, 0x4a, 0x6f, 0x69, 0x6e,
7740 0x3d, 0x88, 0x00, 0x11, 0x00, 0x00 },
7741 }, {
7742 .inlen = 159,
7743 .outlen = 133,
7744 .input = "This document describes a compression method based on the LZO "
7745 "compression algorithm. This document defines the application of "
7746 "the LZO algorithm used in UBIFS.",
7747 .output = { 0x00, 0x2b, 0x54, 0x68, 0x69, 0x73, 0x20, 0x64,
7748 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x20,
7749 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65,
7750 0x73, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6d, 0x70,
7751 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x20,
7752 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x20, 0x62,
7753 0x61, 0x73, 0x65, 0x64, 0x20, 0x6f, 0x6e, 0x20,
7754 0x74, 0x68, 0x65, 0x20, 0x4c, 0x5a, 0x4f, 0x2b,
7755 0x8c, 0x00, 0x0d, 0x61, 0x6c, 0x67, 0x6f, 0x72,
7756 0x69, 0x74, 0x68, 0x6d, 0x2e, 0x20, 0x20, 0x54,
7757 0x68, 0x69, 0x73, 0x2a, 0x54, 0x01, 0x02, 0x66,
7758 0x69, 0x6e, 0x65, 0x73, 0x94, 0x06, 0x05, 0x61,
7759 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x76,
7760 0x0a, 0x6f, 0x66, 0x88, 0x02, 0x60, 0x09, 0x27,
7761 0xf0, 0x00, 0x0c, 0x20, 0x75, 0x73, 0x65, 0x64,
7762 0x20, 0x69, 0x6e, 0x20, 0x55, 0x42, 0x49, 0x46,
7763 0x53, 0x2e, 0x11, 0x00, 0x00 },
7764 },
7765};
7766
7767static struct comp_testvec lzo_decomp_tv_template[] = {
7768 {
7769 .inlen = 133,
7770 .outlen = 159,
7771 .input = { 0x00, 0x2b, 0x54, 0x68, 0x69, 0x73, 0x20, 0x64,
7772 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x20,
7773 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65,
7774 0x73, 0x20, 0x61, 0x20, 0x63, 0x6f, 0x6d, 0x70,
7775 0x72, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x20,
7776 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x20, 0x62,
7777 0x61, 0x73, 0x65, 0x64, 0x20, 0x6f, 0x6e, 0x20,
7778 0x74, 0x68, 0x65, 0x20, 0x4c, 0x5a, 0x4f, 0x2b,
7779 0x8c, 0x00, 0x0d, 0x61, 0x6c, 0x67, 0x6f, 0x72,
7780 0x69, 0x74, 0x68, 0x6d, 0x2e, 0x20, 0x20, 0x54,
7781 0x68, 0x69, 0x73, 0x2a, 0x54, 0x01, 0x02, 0x66,
7782 0x69, 0x6e, 0x65, 0x73, 0x94, 0x06, 0x05, 0x61,
7783 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x76,
7784 0x0a, 0x6f, 0x66, 0x88, 0x02, 0x60, 0x09, 0x27,
7785 0xf0, 0x00, 0x0c, 0x20, 0x75, 0x73, 0x65, 0x64,
7786 0x20, 0x69, 0x6e, 0x20, 0x55, 0x42, 0x49, 0x46,
7787 0x53, 0x2e, 0x11, 0x00, 0x00 },
7788 .output = "This document describes a compression method based on the LZO "
7789 "compression algorithm. This document defines the application of "
7790 "the LZO algorithm used in UBIFS.",
7791 }, {
7792 .inlen = 46,
7793 .outlen = 70,
7794 .input = { 0x00, 0x0d, 0x4a, 0x6f, 0x69, 0x6e, 0x20, 0x75,
7795 0x73, 0x20, 0x6e, 0x6f, 0x77, 0x20, 0x61, 0x6e,
7796 0x64, 0x20, 0x73, 0x68, 0x61, 0x72, 0x65, 0x20,
7797 0x74, 0x68, 0x65, 0x20, 0x73, 0x6f, 0x66, 0x74,
7798 0x77, 0x70, 0x01, 0x01, 0x4a, 0x6f, 0x69, 0x6e,
7799 0x3d, 0x88, 0x00, 0x11, 0x00, 0x00 },
7800 .output = "Join us now and share the software "
7801 "Join us now and share the software ",
7802 },
7803};
7804
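
The LZO templates are deliberately mirror images: the compression template's .output bytes reappear verbatim as the decompression template's .input, and the .inlen/.outlen fields must match the literal data. The small C check below verifies those byte counts for the "Join us now" vector; it copies the 46 compressed bytes from lzo_comp_tv_template[0].output above and does not call the LZO library, so join_lzo, join_text and main() are assumptions made for this sketch only.

#include <stdio.h>
#include <string.h>

/* Compressed bytes copied verbatim from lzo_comp_tv_template[0].output. */
static const unsigned char join_lzo[] = {
        0x00, 0x0d, 0x4a, 0x6f, 0x69, 0x6e, 0x20, 0x75,
        0x73, 0x20, 0x6e, 0x6f, 0x77, 0x20, 0x61, 0x6e,
        0x64, 0x20, 0x73, 0x68, 0x61, 0x72, 0x65, 0x20,
        0x74, 0x68, 0x65, 0x20, 0x73, 0x6f, 0x66, 0x74,
        0x77, 0x70, 0x01, 0x01, 0x4a, 0x6f, 0x69, 0x6e,
        0x3d, 0x88, 0x00, 0x11, 0x00, 0x00
};

static const char join_text[] =
        "Join us now and share the software "
        "Join us now and share the software ";

int main(void)
{
        /* .inlen/.outlen in the templates must match the literal data:
         * 70 plaintext bytes in, 46 compressed bytes out. */
        int ok = strlen(join_text) == 70 && sizeof(join_lzo) == 46;

        printf("inlen=%zu outlen=%zu -> %s\n",
               strlen(join_text), sizeof(join_lzo), ok ? "ok" : "MISMATCH");
        return ok ? 0 : 1;
}
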
7805/*
4411 * Michael MIC test vectors from IEEE 802.11i 7806 * Michael MIC test vectors from IEEE 802.11i
4412 */ 7807 */
4413#define MICHAEL_MIC_TEST_VECTORS 6 7808#define MICHAEL_MIC_TEST_VECTORS 6
@@ -4812,4 +8207,20 @@ static struct cipher_speed camellia_speed_template[] = {
4812 { .klen = 0, .blen = 0, } 8207 { .klen = 0, .blen = 0, }
4813}; 8208};
4814 8209
8210static struct cipher_speed salsa20_speed_template[] = {
8211 { .klen = 16, .blen = 16, },
8212 { .klen = 16, .blen = 64, },
8213 { .klen = 16, .blen = 256, },
8214 { .klen = 16, .blen = 1024, },
8215 { .klen = 16, .blen = 8192, },
8216 { .klen = 32, .blen = 16, },
8217 { .klen = 32, .blen = 64, },
8218 { .klen = 32, .blen = 256, },
8219 { .klen = 32, .blen = 1024, },
8220 { .klen = 32, .blen = 8192, },
8221
8222 /* End marker */
8223 { .klen = 0, .blen = 0, }
8224};
8225
4815#endif /* _CRYPTO_TCRYPT_H */ 8226#endif /* _CRYPTO_TCRYPT_H */
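
The speed templates such as salsa20_speed_template above are zero-terminated arrays of key-length/block-length pairs, so the benchmark walks them until it reaches the { .klen = 0, .blen = 0 } end marker. A minimal sketch of that iteration pattern, assuming a cut-down struct cipher_speed and a plain printf in place of the real timing code:

#include <stdio.h>

/* Mirrors the shape of struct cipher_speed used above; the struct and
 * the loop are illustrative, not the tcrypt benchmark itself. */
struct cipher_speed {
        unsigned int klen;
        unsigned int blen;
};

static const struct cipher_speed salsa20_speed[] = {
        { .klen = 16, .blen = 16 },  { .klen = 16, .blen = 8192 },
        { .klen = 32, .blen = 16 },  { .klen = 32, .blen = 8192 },
        { .klen = 0,  .blen = 0 },   /* end marker, as in the template */
};

int main(void)
{
        const struct cipher_speed *p;

        /* The all-zero entry terminates the list, so the consumer can
         * walk it without an explicit element count. */
        for (p = salsa20_speed; p->klen != 0 && p->blen != 0; p++)
                printf("key %u bytes, block %u bytes\n", p->klen, p->blen);
        return 0;
}
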
diff --git a/crypto/twofish_common.c b/crypto/twofish_common.c
index b4b9c0c3f4ae..0af216c75d7e 100644
--- a/crypto/twofish_common.c
+++ b/crypto/twofish_common.c
@@ -655,84 +655,48 @@ int twofish_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int key_len)
655 CALC_SB256_2( i, calc_sb_tbl[j], calc_sb_tbl[k] ); 655 CALC_SB256_2( i, calc_sb_tbl[j], calc_sb_tbl[k] );
656 } 656 }
657 657
658 /* Calculate whitening and round subkeys. The constants are 658 /* CALC_K256/CALC_K192/CALC_K loops were unrolled.
659 * indices of subkeys, preprocessed through q0 and q1. */ 659 * Unrolling produced x2.5 more code (+18k on i386),
660 CALC_K256 (w, 0, 0xA9, 0x75, 0x67, 0xF3); 660 * and speeded up key setup by 7%:
661 CALC_K256 (w, 2, 0xB3, 0xC6, 0xE8, 0xF4); 661 * unrolled: twofish_setkey/sec: 41128
662 CALC_K256 (w, 4, 0x04, 0xDB, 0xFD, 0x7B); 662 * loop: twofish_setkey/sec: 38148
663 CALC_K256 (w, 6, 0xA3, 0xFB, 0x76, 0xC8); 663 * CALC_K256: ~100 insns each
664 CALC_K256 (k, 0, 0x9A, 0x4A, 0x92, 0xD3); 664 * CALC_K192: ~90 insns
665 CALC_K256 (k, 2, 0x80, 0xE6, 0x78, 0x6B); 665 * CALC_K: ~70 insns
666 CALC_K256 (k, 4, 0xE4, 0x45, 0xDD, 0x7D); 666 */
667 CALC_K256 (k, 6, 0xD1, 0xE8, 0x38, 0x4B); 667 /* Calculate whitening and round subkeys */
668 CALC_K256 (k, 8, 0x0D, 0xD6, 0xC6, 0x32); 668 for ( i = 0; i < 8; i += 2 ) {
669 CALC_K256 (k, 10, 0x35, 0xD8, 0x98, 0xFD); 669 CALC_K256 (w, i, q0[i], q1[i], q0[i+1], q1[i+1]);
670 CALC_K256 (k, 12, 0x18, 0x37, 0xF7, 0x71); 670 }
671 CALC_K256 (k, 14, 0xEC, 0xF1, 0x6C, 0xE1); 671 for ( i = 0; i < 32; i += 2 ) {
672 CALC_K256 (k, 16, 0x43, 0x30, 0x75, 0x0F); 672 CALC_K256 (k, i, q0[i+8], q1[i+8], q0[i+9], q1[i+9]);
673 CALC_K256 (k, 18, 0x37, 0xF8, 0x26, 0x1B); 673 }
674 CALC_K256 (k, 20, 0xFA, 0x87, 0x13, 0xFA);
675 CALC_K256 (k, 22, 0x94, 0x06, 0x48, 0x3F);
676 CALC_K256 (k, 24, 0xF2, 0x5E, 0xD0, 0xBA);
677 CALC_K256 (k, 26, 0x8B, 0xAE, 0x30, 0x5B);
678 CALC_K256 (k, 28, 0x84, 0x8A, 0x54, 0x00);
679 CALC_K256 (k, 30, 0xDF, 0xBC, 0x23, 0x9D);
680 } else if (key_len == 24) { /* 192-bit key */ 674 } else if (key_len == 24) { /* 192-bit key */
681 /* Compute the S-boxes. */ 675 /* Compute the S-boxes. */
682 for ( i = j = 0, k = 1; i < 256; i++, j += 2, k += 2 ) { 676 for ( i = j = 0, k = 1; i < 256; i++, j += 2, k += 2 ) {
683 CALC_SB192_2( i, calc_sb_tbl[j], calc_sb_tbl[k] ); 677 CALC_SB192_2( i, calc_sb_tbl[j], calc_sb_tbl[k] );
684 } 678 }
685 679
686 /* Calculate whitening and round subkeys. The constants are 680 /* Calculate whitening and round subkeys */
687 * indices of subkeys, preprocessed through q0 and q1. */ 681 for ( i = 0; i < 8; i += 2 ) {
688 CALC_K192 (w, 0, 0xA9, 0x75, 0x67, 0xF3); 682 CALC_K192 (w, i, q0[i], q1[i], q0[i+1], q1[i+1]);
689 CALC_K192 (w, 2, 0xB3, 0xC6, 0xE8, 0xF4); 683 }
690 CALC_K192 (w, 4, 0x04, 0xDB, 0xFD, 0x7B); 684 for ( i = 0; i < 32; i += 2 ) {
691 CALC_K192 (w, 6, 0xA3, 0xFB, 0x76, 0xC8); 685 CALC_K192 (k, i, q0[i+8], q1[i+8], q0[i+9], q1[i+9]);
692 CALC_K192 (k, 0, 0x9A, 0x4A, 0x92, 0xD3); 686 }
693 CALC_K192 (k, 2, 0x80, 0xE6, 0x78, 0x6B);
694 CALC_K192 (k, 4, 0xE4, 0x45, 0xDD, 0x7D);
695 CALC_K192 (k, 6, 0xD1, 0xE8, 0x38, 0x4B);
696 CALC_K192 (k, 8, 0x0D, 0xD6, 0xC6, 0x32);
697 CALC_K192 (k, 10, 0x35, 0xD8, 0x98, 0xFD);
698 CALC_K192 (k, 12, 0x18, 0x37, 0xF7, 0x71);
699 CALC_K192 (k, 14, 0xEC, 0xF1, 0x6C, 0xE1);
700 CALC_K192 (k, 16, 0x43, 0x30, 0x75, 0x0F);
701 CALC_K192 (k, 18, 0x37, 0xF8, 0x26, 0x1B);
702 CALC_K192 (k, 20, 0xFA, 0x87, 0x13, 0xFA);
703 CALC_K192 (k, 22, 0x94, 0x06, 0x48, 0x3F);
704 CALC_K192 (k, 24, 0xF2, 0x5E, 0xD0, 0xBA);
705 CALC_K192 (k, 26, 0x8B, 0xAE, 0x30, 0x5B);
706 CALC_K192 (k, 28, 0x84, 0x8A, 0x54, 0x00);
707 CALC_K192 (k, 30, 0xDF, 0xBC, 0x23, 0x9D);
708 } else { /* 128-bit key */ 687 } else { /* 128-bit key */
709 /* Compute the S-boxes. */ 688 /* Compute the S-boxes. */
710 for ( i = j = 0, k = 1; i < 256; i++, j += 2, k += 2 ) { 689 for ( i = j = 0, k = 1; i < 256; i++, j += 2, k += 2 ) {
711 CALC_SB_2( i, calc_sb_tbl[j], calc_sb_tbl[k] ); 690 CALC_SB_2( i, calc_sb_tbl[j], calc_sb_tbl[k] );
712 } 691 }
713 692
714 /* Calculate whitening and round subkeys. The constants are 693 /* Calculate whitening and round subkeys */
715 * indices of subkeys, preprocessed through q0 and q1. */ 694 for ( i = 0; i < 8; i += 2 ) {
716 CALC_K (w, 0, 0xA9, 0x75, 0x67, 0xF3); 695 CALC_K (w, i, q0[i], q1[i], q0[i+1], q1[i+1]);
717 CALC_K (w, 2, 0xB3, 0xC6, 0xE8, 0xF4); 696 }
718 CALC_K (w, 4, 0x04, 0xDB, 0xFD, 0x7B); 697 for ( i = 0; i < 32; i += 2 ) {
719 CALC_K (w, 6, 0xA3, 0xFB, 0x76, 0xC8); 698 CALC_K (k, i, q0[i+8], q1[i+8], q0[i+9], q1[i+9]);
720 CALC_K (k, 0, 0x9A, 0x4A, 0x92, 0xD3); 699 }
721 CALC_K (k, 2, 0x80, 0xE6, 0x78, 0x6B);
722 CALC_K (k, 4, 0xE4, 0x45, 0xDD, 0x7D);
723 CALC_K (k, 6, 0xD1, 0xE8, 0x38, 0x4B);
724 CALC_K (k, 8, 0x0D, 0xD6, 0xC6, 0x32);
725 CALC_K (k, 10, 0x35, 0xD8, 0x98, 0xFD);
726 CALC_K (k, 12, 0x18, 0x37, 0xF7, 0x71);
727 CALC_K (k, 14, 0xEC, 0xF1, 0x6C, 0xE1);
728 CALC_K (k, 16, 0x43, 0x30, 0x75, 0x0F);
729 CALC_K (k, 18, 0x37, 0xF8, 0x26, 0x1B);
730 CALC_K (k, 20, 0xFA, 0x87, 0x13, 0xFA);
731 CALC_K (k, 22, 0x94, 0x06, 0x48, 0x3F);
732 CALC_K (k, 24, 0xF2, 0x5E, 0xD0, 0xBA);
733 CALC_K (k, 26, 0x8B, 0xAE, 0x30, 0x5B);
734 CALC_K (k, 28, 0x84, 0x8A, 0x54, 0x00);
735 CALC_K (k, 30, 0xDF, 0xBC, 0x23, 0x9D);
736 } 700 }
737 701
738 return 0; 702 return 0;
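
The twofish_common.c change relies on the literal constants in the removed CALC_K256/CALC_K192/CALC_K calls being consecutive entries of the q0[] and q1[] permutation tables, so the unrolled call sites can be regenerated by a loop that indexes those tables. The toy program below demonstrates only that refactoring shape; calc_k() and the four-entry q0[]/q1[] arrays are stand-ins whose values are taken from the first removed call sites, not the real Twofish key schedule.

#include <assert.h>
#include <stdio.h>

/* Toy stand-in for CALC_K: combine two "preprocessed" constants into a
 * subkey word.  The real macro does far more work; only the
 * unrolled-versus-loop structure is being illustrated. */
static unsigned int calc_k(unsigned int a, unsigned int b)
{
        return (a << 8) ^ b;
}

/* Tiny stand-in tables for q0[]/q1[]; the real tables have 256 entries. */
static const unsigned int q0[4] = { 0xA9, 0x67, 0xB3, 0xE8 };
static const unsigned int q1[4] = { 0x75, 0xF3, 0xC6, 0xF4 };

int main(void)
{
        unsigned int unrolled[4], looped[4];
        int i;

        /* Unrolled form: constants spelled out at every call site,
         * as in the removed CALC_K256 (w, 0, 0xA9, 0x75, 0x67, 0xF3). */
        unrolled[0] = calc_k(0xA9, 0x75);
        unrolled[1] = calc_k(0x67, 0xF3);
        unrolled[2] = calc_k(0xB3, 0xC6);
        unrolled[3] = calc_k(0xE8, 0xF4);

        /* Loop form: the same constants come out of the tables, which is
         * what the patch does with q0[i]/q1[i] in twofish_setkey(). */
        for (i = 0; i < 4; i++)
                looped[i] = calc_k(q0[i], q1[i]);

        for (i = 0; i < 4; i++)
                assert(unrolled[i] == looped[i]);
        printf("unrolled and looped key schedules agree\n");
        return 0;
}
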
diff --git a/crypto/xcbc.c b/crypto/xcbc.c
index ac68f3b62fde..a82959df678c 100644
--- a/crypto/xcbc.c
+++ b/crypto/xcbc.c
@@ -19,6 +19,7 @@
19 * Kazunori Miyazawa <miyazawa@linux-ipv6.org> 19 * Kazunori Miyazawa <miyazawa@linux-ipv6.org>
20 */ 20 */
21 21
22#include <crypto/scatterwalk.h>
22#include <linux/crypto.h> 23#include <linux/crypto.h>
23#include <linux/err.h> 24#include <linux/err.h>
24#include <linux/hardirq.h> 25#include <linux/hardirq.h>
@@ -27,7 +28,6 @@
27#include <linux/rtnetlink.h> 28#include <linux/rtnetlink.h>
28#include <linux/slab.h> 29#include <linux/slab.h>
29#include <linux/scatterlist.h> 30#include <linux/scatterlist.h>
30#include "internal.h"
31 31
32static u_int32_t ks[12] = {0x01010101, 0x01010101, 0x01010101, 0x01010101, 32static u_int32_t ks[12] = {0x01010101, 0x01010101, 0x01010101, 0x01010101,
33 0x02020202, 0x02020202, 0x02020202, 0x02020202, 33 0x02020202, 0x02020202, 0x02020202, 0x02020202,
@@ -307,7 +307,8 @@ static struct crypto_instance *xcbc_alloc(struct rtattr **tb)
307 case 16: 307 case 16:
308 break; 308 break;
309 default: 309 default:
310 return ERR_PTR(PTR_ERR(alg)); 310 inst = ERR_PTR(-EINVAL);
311 goto out_put_alg;
311 } 312 }
312 313
313 inst = crypto_alloc_instance("xcbc", alg); 314 inst = crypto_alloc_instance("xcbc", alg);
@@ -320,10 +321,7 @@ static struct crypto_instance *xcbc_alloc(struct rtattr **tb)
320 inst->alg.cra_alignmask = alg->cra_alignmask; 321 inst->alg.cra_alignmask = alg->cra_alignmask;
321 inst->alg.cra_type = &crypto_hash_type; 322 inst->alg.cra_type = &crypto_hash_type;
322 323
323 inst->alg.cra_hash.digestsize = 324 inst->alg.cra_hash.digestsize = alg->cra_blocksize;
324 (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
325 CRYPTO_ALG_TYPE_HASH ? alg->cra_hash.digestsize :
326 alg->cra_blocksize;
327 inst->alg.cra_ctxsize = sizeof(struct crypto_xcbc_ctx) + 325 inst->alg.cra_ctxsize = sizeof(struct crypto_xcbc_ctx) +
328 ALIGN(inst->alg.cra_blocksize * 3, sizeof(void *)); 326 ALIGN(inst->alg.cra_blocksize * 3, sizeof(void *));
329 inst->alg.cra_init = xcbc_init_tfm; 327 inst->alg.cra_init = xcbc_init_tfm;
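
The xcbc_alloc() hunk fixes its unsupported-blocksize path: the old code returned ERR_PTR(PTR_ERR(alg)), which hands back alg itself as though it were an error pointer, and it returned immediately instead of branching to the out_put_alg cleanup that the new code jumps to. A compact user-space sketch of the corrected pattern, with ERR_PTR()/PTR_ERR()/IS_ERR() reimplemented locally and xcbc_alloc_sketch(), struct alg and struct inst invented for illustration:

#include <errno.h>
#include <stdio.h>

/* Simplified user-space copies of the kernel's error-pointer helpers,
 * only good enough to illustrate the flow the patch fixes. */
#define ERR_PTR(err)    ((void *)(long)(err))
#define PTR_ERR(ptr)    ((long)(ptr))
#define IS_ERR(ptr)     ((unsigned long)(ptr) >= (unsigned long)-4095)

struct alg  { int blocksize; };
struct inst { struct alg *alg; };

static void put_alg(struct alg *alg) { (void)alg; /* drop the reference */ }

static struct inst *xcbc_alloc_sketch(struct alg *alg)
{
        struct inst *inst;

        switch (alg->blocksize) {
        case 16:
                break;
        default:
                /* Set a real error code and fall through to the shared
                 * exit, as the patched xcbc_alloc() now does. */
                inst = ERR_PTR(-EINVAL);
                goto out_put_alg;
        }

        inst = NULL;    /* the real instance allocation would happen here */

out_put_alg:
        put_alg(alg);
        return inst;
}

int main(void)
{
        struct alg bad = { .blocksize = 8 };
        struct inst *inst = xcbc_alloc_sketch(&bad);

        if (IS_ERR(inst))
                printf("rejected: %ld\n", PTR_ERR(inst));
        return 0;
}
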
diff --git a/drivers/char/hw_random/amd-rng.c b/drivers/char/hw_random/amd-rng.c
index 556fd81fa815..c422e870dc52 100644
--- a/drivers/char/hw_random/amd-rng.c
+++ b/drivers/char/hw_random/amd-rng.c
@@ -28,6 +28,7 @@
28#include <linux/kernel.h> 28#include <linux/kernel.h>
29#include <linux/pci.h> 29#include <linux/pci.h>
30#include <linux/hw_random.h> 30#include <linux/hw_random.h>
31#include <linux/delay.h>
31#include <asm/io.h> 32#include <asm/io.h>
32 33
33 34
@@ -52,11 +53,18 @@ MODULE_DEVICE_TABLE(pci, pci_tbl);
52static struct pci_dev *amd_pdev; 53static struct pci_dev *amd_pdev;
53 54
54 55
55static int amd_rng_data_present(struct hwrng *rng) 56static int amd_rng_data_present(struct hwrng *rng, int wait)
56{ 57{
57 u32 pmbase = (u32)rng->priv; 58 u32 pmbase = (u32)rng->priv;
59 int data, i;
58 60
59 return !!(inl(pmbase + 0xF4) & 1); 61 for (i = 0; i < 20; i++) {
62 data = !!(inl(pmbase + 0xF4) & 1);
63 if (data || !wait)
64 break;
65 udelay(10);
66 }
67 return data;
60} 68}
61 69
62static int amd_rng_data_read(struct hwrng *rng, u32 *data) 70static int amd_rng_data_read(struct hwrng *rng, u32 *data)
diff --git a/drivers/char/hw_random/core.c b/drivers/char/hw_random/core.c
index 26a860adcb38..0118b9817a95 100644
--- a/drivers/char/hw_random/core.c
+++ b/drivers/char/hw_random/core.c
@@ -66,11 +66,11 @@ static inline void hwrng_cleanup(struct hwrng *rng)
66 rng->cleanup(rng); 66 rng->cleanup(rng);
67} 67}
68 68
69static inline int hwrng_data_present(struct hwrng *rng) 69static inline int hwrng_data_present(struct hwrng *rng, int wait)
70{ 70{
71 if (!rng->data_present) 71 if (!rng->data_present)
72 return 1; 72 return 1;
73 return rng->data_present(rng); 73 return rng->data_present(rng, wait);
74} 74}
75 75
76static inline int hwrng_data_read(struct hwrng *rng, u32 *data) 76static inline int hwrng_data_read(struct hwrng *rng, u32 *data)
@@ -94,8 +94,7 @@ static ssize_t rng_dev_read(struct file *filp, char __user *buf,
94{ 94{
95 u32 data; 95 u32 data;
96 ssize_t ret = 0; 96 ssize_t ret = 0;
97 int i, err = 0; 97 int err = 0;
98 int data_present;
99 int bytes_read; 98 int bytes_read;
100 99
101 while (size) { 100 while (size) {
@@ -107,21 +106,10 @@ static ssize_t rng_dev_read(struct file *filp, char __user *buf,
107 err = -ENODEV; 106 err = -ENODEV;
108 goto out; 107 goto out;
109 } 108 }
110 if (filp->f_flags & O_NONBLOCK) { 109
111 data_present = hwrng_data_present(current_rng);
112 } else {
113 /* Some RNG require some time between data_reads to gather
114 * new entropy. Poll it.
115 */
116 for (i = 0; i < 20; i++) {
117 data_present = hwrng_data_present(current_rng);
118 if (data_present)
119 break;
120 udelay(10);
121 }
122 }
123 bytes_read = 0; 110 bytes_read = 0;
124 if (data_present) 111 if (hwrng_data_present(current_rng,
112 !(filp->f_flags & O_NONBLOCK)))
125 bytes_read = hwrng_data_read(current_rng, &data); 113 bytes_read = hwrng_data_read(current_rng, &data);
126 mutex_unlock(&rng_mutex); 114 mutex_unlock(&rng_mutex);
127 115
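
With this core.c change, rng_dev_read() no longer polls on behalf of drivers; data_present() gains a wait argument and each driver does its own bounded poll (the converted drivers below retry up to 20 times with udelay(10)). The sketch that follows models that contract in user space; rng_data_present(), read_status() and the usleep() stand-in for udelay() are assumptions for the example, not driver code.

#include <stdio.h>
#include <unistd.h>

/* Stand-in for reading the device status register; it pretends data
 * becomes available after a couple of polls. */
static int read_status(void)
{
        static int countdown = 3;
        return --countdown <= 0;
}

static int rng_data_present(int wait)
{
        int data, i;

        for (i = 0; i < 20; i++) {
                data = !!read_status();
                if (data || !wait)
                        break;
                usleep(10);     /* the drivers use udelay(10) here */
        }
        return data;
}

int main(void)
{
        /* O_NONBLOCK readers pass wait=0 and get an immediate answer;
         * blocking readers pass wait=1 and may poll briefly. */
        printf("non-blocking: %d\n", rng_data_present(0));
        printf("blocking:     %d\n", rng_data_present(1));
        return 0;
}
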
diff --git a/drivers/char/hw_random/geode-rng.c b/drivers/char/hw_random/geode-rng.c
index 8e8658dcd2e3..fed4ef5569f5 100644
--- a/drivers/char/hw_random/geode-rng.c
+++ b/drivers/char/hw_random/geode-rng.c
@@ -28,6 +28,7 @@
28#include <linux/kernel.h> 28#include <linux/kernel.h>
29#include <linux/pci.h> 29#include <linux/pci.h>
30#include <linux/hw_random.h> 30#include <linux/hw_random.h>
31#include <linux/delay.h>
31#include <asm/io.h> 32#include <asm/io.h>
32 33
33 34
@@ -61,11 +62,18 @@ static int geode_rng_data_read(struct hwrng *rng, u32 *data)
61 return 4; 62 return 4;
62} 63}
63 64
64static int geode_rng_data_present(struct hwrng *rng) 65static int geode_rng_data_present(struct hwrng *rng, int wait)
65{ 66{
66 void __iomem *mem = (void __iomem *)rng->priv; 67 void __iomem *mem = (void __iomem *)rng->priv;
68 int data, i;
67 69
68 return !!(readl(mem + GEODE_RNG_STATUS_REG)); 70 for (i = 0; i < 20; i++) {
71 data = !!(readl(mem + GEODE_RNG_STATUS_REG));
72 if (data || !wait)
73 break;
74 udelay(10);
75 }
76 return data;
69} 77}
70 78
71 79
diff --git a/drivers/char/hw_random/intel-rng.c b/drivers/char/hw_random/intel-rng.c
index 753f46052b87..5cc651ef75eb 100644
--- a/drivers/char/hw_random/intel-rng.c
+++ b/drivers/char/hw_random/intel-rng.c
@@ -29,6 +29,7 @@
29#include <linux/module.h> 29#include <linux/module.h>
30#include <linux/pci.h> 30#include <linux/pci.h>
31#include <linux/stop_machine.h> 31#include <linux/stop_machine.h>
32#include <linux/delay.h>
32#include <asm/io.h> 33#include <asm/io.h>
33 34
34 35
@@ -162,11 +163,19 @@ static inline u8 hwstatus_set(void __iomem *mem,
162 return hwstatus_get(mem); 163 return hwstatus_get(mem);
163} 164}
164 165
165static int intel_rng_data_present(struct hwrng *rng) 166static int intel_rng_data_present(struct hwrng *rng, int wait)
166{ 167{
167 void __iomem *mem = (void __iomem *)rng->priv; 168 void __iomem *mem = (void __iomem *)rng->priv;
168 169 int data, i;
169 return !!(readb(mem + INTEL_RNG_STATUS) & INTEL_RNG_DATA_PRESENT); 170
171 for (i = 0; i < 20; i++) {
172 data = !!(readb(mem + INTEL_RNG_STATUS) &
173 INTEL_RNG_DATA_PRESENT);
174 if (data || !wait)
175 break;
176 udelay(10);
177 }
178 return data;
170} 179}
171 180
172static int intel_rng_data_read(struct hwrng *rng, u32 *data) 181static int intel_rng_data_read(struct hwrng *rng, u32 *data)
diff --git a/drivers/char/hw_random/omap-rng.c b/drivers/char/hw_random/omap-rng.c
index 3f35a1c562b1..7e319951fa41 100644
--- a/drivers/char/hw_random/omap-rng.c
+++ b/drivers/char/hw_random/omap-rng.c
@@ -29,6 +29,7 @@
29#include <linux/err.h> 29#include <linux/err.h>
30#include <linux/platform_device.h> 30#include <linux/platform_device.h>
31#include <linux/hw_random.h> 31#include <linux/hw_random.h>
32#include <linux/delay.h>
32 33
33#include <asm/io.h> 34#include <asm/io.h>
34 35
@@ -65,9 +66,17 @@ static void omap_rng_write_reg(int reg, u32 val)
65} 66}
66 67
67/* REVISIT: Does the status bit really work on 16xx? */ 68/* REVISIT: Does the status bit really work on 16xx? */
68static int omap_rng_data_present(struct hwrng *rng) 69static int omap_rng_data_present(struct hwrng *rng, int wait)
69{ 70{
70 return omap_rng_read_reg(RNG_STAT_REG) ? 0 : 1; 71 int data, i;
72
73 for (i = 0; i < 20; i++) {
74 data = omap_rng_read_reg(RNG_STAT_REG) ? 0 : 1;
75 if (data || !wait)
76 break;
77 udelay(10);
78 }
79 return data;
71} 80}
72 81
73static int omap_rng_data_read(struct hwrng *rng, u32 *data) 82static int omap_rng_data_read(struct hwrng *rng, u32 *data)
diff --git a/drivers/char/hw_random/pasemi-rng.c b/drivers/char/hw_random/pasemi-rng.c
index fa6040b6c8f2..e2ea210cfa5f 100644
--- a/drivers/char/hw_random/pasemi-rng.c
+++ b/drivers/char/hw_random/pasemi-rng.c
@@ -23,6 +23,7 @@
23#include <linux/kernel.h> 23#include <linux/kernel.h>
24#include <linux/platform_device.h> 24#include <linux/platform_device.h>
25#include <linux/hw_random.h> 25#include <linux/hw_random.h>
26#include <linux/delay.h>
26#include <asm/of_platform.h> 27#include <asm/of_platform.h>
27#include <asm/io.h> 28#include <asm/io.h>
28 29
@@ -41,12 +42,19 @@
41 42
42#define MODULE_NAME "pasemi_rng" 43#define MODULE_NAME "pasemi_rng"
43 44
44static int pasemi_rng_data_present(struct hwrng *rng) 45static int pasemi_rng_data_present(struct hwrng *rng, int wait)
45{ 46{
46 void __iomem *rng_regs = (void __iomem *)rng->priv; 47 void __iomem *rng_regs = (void __iomem *)rng->priv;
47 48 int data, i;
48 return (in_le32(rng_regs + SDCRNG_CTL_REG) 49
49 & SDCRNG_CTL_FVLD_M) ? 1 : 0; 50 for (i = 0; i < 20; i++) {
51 data = (in_le32(rng_regs + SDCRNG_CTL_REG)
52 & SDCRNG_CTL_FVLD_M) ? 1 : 0;
53 if (data || !wait)
54 break;
55 udelay(10);
56 }
57 return data;
50} 58}
51 59
52static int pasemi_rng_data_read(struct hwrng *rng, u32 *data) 60static int pasemi_rng_data_read(struct hwrng *rng, u32 *data)
diff --git a/drivers/char/hw_random/via-rng.c b/drivers/char/hw_random/via-rng.c
index ec435cb25c4f..868e39fd42e4 100644
--- a/drivers/char/hw_random/via-rng.c
+++ b/drivers/char/hw_random/via-rng.c
@@ -27,6 +27,7 @@
27#include <linux/module.h> 27#include <linux/module.h>
28#include <linux/kernel.h> 28#include <linux/kernel.h>
29#include <linux/hw_random.h> 29#include <linux/hw_random.h>
30#include <linux/delay.h>
30#include <asm/io.h> 31#include <asm/io.h>
31#include <asm/msr.h> 32#include <asm/msr.h>
32#include <asm/cpufeature.h> 33#include <asm/cpufeature.h>
@@ -77,10 +78,11 @@ static inline u32 xstore(u32 *addr, u32 edx_in)
77 return eax_out; 78 return eax_out;
78} 79}
79 80
80static int via_rng_data_present(struct hwrng *rng) 81static int via_rng_data_present(struct hwrng *rng, int wait)
81{ 82{
82 u32 bytes_out; 83 u32 bytes_out;
83 u32 *via_rng_datum = (u32 *)(&rng->priv); 84 u32 *via_rng_datum = (u32 *)(&rng->priv);
85 int i;
84 86
85 /* We choose the recommended 1-byte-per-instruction RNG rate, 87 /* We choose the recommended 1-byte-per-instruction RNG rate,
86 * for greater randomness at the expense of speed. Larger 88 * for greater randomness at the expense of speed. Larger
@@ -95,12 +97,15 @@ static int via_rng_data_present(struct hwrng *rng)
95 * completes. 97 * completes.
96 */ 98 */
97 99
98 *via_rng_datum = 0; /* paranoia, not really necessary */ 100 for (i = 0; i < 20; i++) {
99 bytes_out = xstore(via_rng_datum, VIA_RNG_CHUNK_1); 101 *via_rng_datum = 0; /* paranoia, not really necessary */
100 bytes_out &= VIA_XSTORE_CNT_MASK; 102 bytes_out = xstore(via_rng_datum, VIA_RNG_CHUNK_1);
101 if (bytes_out == 0) 103 bytes_out &= VIA_XSTORE_CNT_MASK;
102 return 0; 104 if (bytes_out || !wait)
103 return 1; 105 break;
106 udelay(10);
107 }
108 return bytes_out ? 1 : 0;
104} 109}
105 110
106static int via_rng_data_read(struct hwrng *rng, u32 *data) 111static int via_rng_data_read(struct hwrng *rng, u32 *data)
diff --git a/drivers/crypto/Kconfig b/drivers/crypto/Kconfig
index ddd3a259cea1..74bd599dfb0c 100644
--- a/drivers/crypto/Kconfig
+++ b/drivers/crypto/Kconfig
@@ -83,4 +83,15 @@ config ZCRYPT_MONOLITHIC
83 that contains all parts of the crypto device driver (ap bus, 83 that contains all parts of the crypto device driver (ap bus,
84 request router and all the card drivers). 84 request router and all the card drivers).
85 85
86config CRYPTO_DEV_HIFN_795X
87 tristate "Driver HIFN 795x crypto accelerator chips"
88 select CRYPTO_DES
89 select CRYPTO_ALGAPI
90 select CRYPTO_BLKCIPHER
91 depends on PCI
92 help
93 This option allows you to have support for HIFN 795x crypto adapters.
94
95
96
86endif # CRYPTO_HW 97endif # CRYPTO_HW
diff --git a/drivers/crypto/Makefile b/drivers/crypto/Makefile
index d070030f7d7e..c0327f0dadc5 100644
--- a/drivers/crypto/Makefile
+++ b/drivers/crypto/Makefile
@@ -1,3 +1,4 @@
1obj-$(CONFIG_CRYPTO_DEV_PADLOCK_AES) += padlock-aes.o 1obj-$(CONFIG_CRYPTO_DEV_PADLOCK_AES) += padlock-aes.o
2obj-$(CONFIG_CRYPTO_DEV_PADLOCK_SHA) += padlock-sha.o 2obj-$(CONFIG_CRYPTO_DEV_PADLOCK_SHA) += padlock-sha.o
3obj-$(CONFIG_CRYPTO_DEV_GEODE) += geode-aes.o 3obj-$(CONFIG_CRYPTO_DEV_GEODE) += geode-aes.o
4obj-$(CONFIG_CRYPTO_DEV_HIFN_795X) += hifn_795x.o
diff --git a/drivers/crypto/geode-aes.c b/drivers/crypto/geode-aes.c
index 711e246e1ef0..4801162919d9 100644
--- a/drivers/crypto/geode-aes.c
+++ b/drivers/crypto/geode-aes.c
@@ -13,44 +13,13 @@
13#include <linux/crypto.h> 13#include <linux/crypto.h>
14#include <linux/spinlock.h> 14#include <linux/spinlock.h>
15#include <crypto/algapi.h> 15#include <crypto/algapi.h>
16#include <crypto/aes.h>
16 17
17#include <asm/io.h> 18#include <asm/io.h>
18#include <asm/delay.h> 19#include <asm/delay.h>
19 20
20#include "geode-aes.h" 21#include "geode-aes.h"
21 22
22/* Register definitions */
23
24#define AES_CTRLA_REG 0x0000
25
26#define AES_CTRL_START 0x01
27#define AES_CTRL_DECRYPT 0x00
28#define AES_CTRL_ENCRYPT 0x02
29#define AES_CTRL_WRKEY 0x04
30#define AES_CTRL_DCA 0x08
31#define AES_CTRL_SCA 0x10
32#define AES_CTRL_CBC 0x20
33
34#define AES_INTR_REG 0x0008
35
36#define AES_INTRA_PENDING (1 << 16)
37#define AES_INTRB_PENDING (1 << 17)
38
39#define AES_INTR_PENDING (AES_INTRA_PENDING | AES_INTRB_PENDING)
40#define AES_INTR_MASK 0x07
41
42#define AES_SOURCEA_REG 0x0010
43#define AES_DSTA_REG 0x0014
44#define AES_LENA_REG 0x0018
45#define AES_WRITEKEY0_REG 0x0030
46#define AES_WRITEIV0_REG 0x0040
47
48/* A very large counter that is used to gracefully bail out of an
49 * operation in case of trouble
50 */
51
52#define AES_OP_TIMEOUT 0x50000
53
54/* Static structures */ 23/* Static structures */
55 24
56static void __iomem * _iobase; 25static void __iomem * _iobase;
@@ -87,9 +56,10 @@ do_crypt(void *src, void *dst, int len, u32 flags)
87 /* Start the operation */ 56 /* Start the operation */
88 iowrite32(AES_CTRL_START | flags, _iobase + AES_CTRLA_REG); 57 iowrite32(AES_CTRL_START | flags, _iobase + AES_CTRLA_REG);
89 58
90 do 59 do {
91 status = ioread32(_iobase + AES_INTR_REG); 60 status = ioread32(_iobase + AES_INTR_REG);
92 while(!(status & AES_INTRA_PENDING) && --counter); 61 cpu_relax();
62 } while(!(status & AES_INTRA_PENDING) && --counter);
93 63
94 /* Clear the event */ 64 /* Clear the event */
95 iowrite32((status & 0xFF) | AES_INTRA_PENDING, _iobase + AES_INTR_REG); 65 iowrite32((status & 0xFF) | AES_INTRA_PENDING, _iobase + AES_INTR_REG);
@@ -101,6 +71,7 @@ geode_aes_crypt(struct geode_aes_op *op)
101{ 71{
102 u32 flags = 0; 72 u32 flags = 0;
103 unsigned long iflags; 73 unsigned long iflags;
74 int ret;
104 75
105 if (op->len == 0) 76 if (op->len == 0)
106 return 0; 77 return 0;
@@ -129,7 +100,8 @@ geode_aes_crypt(struct geode_aes_op *op)
129 _writefield(AES_WRITEKEY0_REG, op->key); 100 _writefield(AES_WRITEKEY0_REG, op->key);
130 } 101 }
131 102
132 do_crypt(op->src, op->dst, op->len, flags); 103 ret = do_crypt(op->src, op->dst, op->len, flags);
104 BUG_ON(ret);
133 105
134 if (op->mode == AES_MODE_CBC) 106 if (op->mode == AES_MODE_CBC)
135 _readfield(AES_WRITEIV0_REG, op->iv); 107 _readfield(AES_WRITEIV0_REG, op->iv);
@@ -141,18 +113,103 @@ geode_aes_crypt(struct geode_aes_op *op)
141 113
142/* CRYPTO-API Functions */ 114/* CRYPTO-API Functions */
143 115
144static int 116static int geode_setkey_cip(struct crypto_tfm *tfm, const u8 *key,
145geode_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int len) 117 unsigned int len)
146{ 118{
147 struct geode_aes_op *op = crypto_tfm_ctx(tfm); 119 struct geode_aes_op *op = crypto_tfm_ctx(tfm);
120 unsigned int ret;
148 121
149 if (len != AES_KEY_LENGTH) { 122 op->keylen = len;
123
124 if (len == AES_KEYSIZE_128) {
125 memcpy(op->key, key, len);
126 return 0;
127 }
128
129 if (len != AES_KEYSIZE_192 && len != AES_KEYSIZE_256) {
130 /* not supported at all */
150 tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; 131 tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
151 return -EINVAL; 132 return -EINVAL;
152 } 133 }
153 134
154 memcpy(op->key, key, len); 135 /*
155 return 0; 136 * The requested key size is not supported by HW, do a fallback
137 */
138 op->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
139 op->fallback.blk->base.crt_flags |= (tfm->crt_flags & CRYPTO_TFM_REQ_MASK);
140
141 ret = crypto_cipher_setkey(op->fallback.cip, key, len);
142 if (ret) {
143 tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
144 tfm->crt_flags |= (op->fallback.blk->base.crt_flags & CRYPTO_TFM_RES_MASK);
145 }
146 return ret;
147}
148
149static int geode_setkey_blk(struct crypto_tfm *tfm, const u8 *key,
150 unsigned int len)
151{
152 struct geode_aes_op *op = crypto_tfm_ctx(tfm);
153 unsigned int ret;
154
155 op->keylen = len;
156
157 if (len == AES_KEYSIZE_128) {
158 memcpy(op->key, key, len);
159 return 0;
160 }
161
162 if (len != AES_KEYSIZE_192 && len != AES_KEYSIZE_256) {
163 /* not supported at all */
164 tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
165 return -EINVAL;
166 }
167
168 /*
169 * The requested key size is not supported by HW, do a fallback
170 */
171 op->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
172 op->fallback.blk->base.crt_flags |= (tfm->crt_flags & CRYPTO_TFM_REQ_MASK);
173
174 ret = crypto_blkcipher_setkey(op->fallback.blk, key, len);
175 if (ret) {
176 tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
177 tfm->crt_flags |= (op->fallback.blk->base.crt_flags & CRYPTO_TFM_RES_MASK);
178 }
179 return ret;
180}
181
182static int fallback_blk_dec(struct blkcipher_desc *desc,
183 struct scatterlist *dst, struct scatterlist *src,
184 unsigned int nbytes)
185{
186 unsigned int ret;
187 struct crypto_blkcipher *tfm;
188 struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
189
190 tfm = desc->tfm;
191 desc->tfm = op->fallback.blk;
192
193 ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);
194
195 desc->tfm = tfm;
196 return ret;
197}
198static int fallback_blk_enc(struct blkcipher_desc *desc,
199 struct scatterlist *dst, struct scatterlist *src,
200 unsigned int nbytes)
201{
202 unsigned int ret;
203 struct crypto_blkcipher *tfm;
204 struct geode_aes_op *op = crypto_blkcipher_ctx(desc->tfm);
205
206 tfm = desc->tfm;
207 desc->tfm = op->fallback.blk;
208
209 ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
210
211 desc->tfm = tfm;
212 return ret;
156} 213}
157 214
158static void 215static void
@@ -160,8 +217,10 @@ geode_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
160{ 217{
161 struct geode_aes_op *op = crypto_tfm_ctx(tfm); 218 struct geode_aes_op *op = crypto_tfm_ctx(tfm);
162 219
163 if ((out == NULL) || (in == NULL)) 220 if (unlikely(op->keylen != AES_KEYSIZE_128)) {
221 crypto_cipher_encrypt_one(op->fallback.cip, out, in);
164 return; 222 return;
223 }
165 224
166 op->src = (void *) in; 225 op->src = (void *) in;
167 op->dst = (void *) out; 226 op->dst = (void *) out;
@@ -179,8 +238,10 @@ geode_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
179{ 238{
180 struct geode_aes_op *op = crypto_tfm_ctx(tfm); 239 struct geode_aes_op *op = crypto_tfm_ctx(tfm);
181 240
182 if ((out == NULL) || (in == NULL)) 241 if (unlikely(op->keylen != AES_KEYSIZE_128)) {
242 crypto_cipher_decrypt_one(op->fallback.cip, out, in);
183 return; 243 return;
244 }
184 245
185 op->src = (void *) in; 246 op->src = (void *) in;
186 op->dst = (void *) out; 247 op->dst = (void *) out;
@@ -192,24 +253,50 @@ geode_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
192 geode_aes_crypt(op); 253 geode_aes_crypt(op);
193} 254}
194 255
256static int fallback_init_cip(struct crypto_tfm *tfm)
257{
258 const char *name = tfm->__crt_alg->cra_name;
259 struct geode_aes_op *op = crypto_tfm_ctx(tfm);
260
261 op->fallback.cip = crypto_alloc_cipher(name, 0,
262 CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
263
264 if (IS_ERR(op->fallback.cip)) {
265 printk(KERN_ERR "Error allocating fallback algo %s\n", name);
 266		return PTR_ERR(op->fallback.cip);
267 }
268
269 return 0;
270}
271
272static void fallback_exit_cip(struct crypto_tfm *tfm)
273{
274 struct geode_aes_op *op = crypto_tfm_ctx(tfm);
275
276 crypto_free_cipher(op->fallback.cip);
277 op->fallback.cip = NULL;
278}
195 279
196static struct crypto_alg geode_alg = { 280static struct crypto_alg geode_alg = {
197 .cra_name = "aes", 281 .cra_name = "aes",
198 .cra_driver_name = "geode-aes-128", 282 .cra_driver_name = "geode-aes",
199 .cra_priority = 300, 283 .cra_priority = 300,
200 .cra_alignmask = 15, 284 .cra_alignmask = 15,
201 .cra_flags = CRYPTO_ALG_TYPE_CIPHER, 285 .cra_flags = CRYPTO_ALG_TYPE_CIPHER |
286 CRYPTO_ALG_NEED_FALLBACK,
287 .cra_init = fallback_init_cip,
288 .cra_exit = fallback_exit_cip,
202 .cra_blocksize = AES_MIN_BLOCK_SIZE, 289 .cra_blocksize = AES_MIN_BLOCK_SIZE,
203 .cra_ctxsize = sizeof(struct geode_aes_op), 290 .cra_ctxsize = sizeof(struct geode_aes_op),
204 .cra_module = THIS_MODULE, 291 .cra_module = THIS_MODULE,
205 .cra_list = LIST_HEAD_INIT(geode_alg.cra_list), 292 .cra_list = LIST_HEAD_INIT(geode_alg.cra_list),
206 .cra_u = { 293 .cra_u = {
207 .cipher = { 294 .cipher = {
208 .cia_min_keysize = AES_KEY_LENGTH, 295 .cia_min_keysize = AES_MIN_KEY_SIZE,
209 .cia_max_keysize = AES_KEY_LENGTH, 296 .cia_max_keysize = AES_MAX_KEY_SIZE,
210 .cia_setkey = geode_setkey, 297 .cia_setkey = geode_setkey_cip,
211 .cia_encrypt = geode_encrypt, 298 .cia_encrypt = geode_encrypt,
212 .cia_decrypt = geode_decrypt 299 .cia_decrypt = geode_decrypt
213 } 300 }
214 } 301 }
215}; 302};
@@ -223,8 +310,12 @@ geode_cbc_decrypt(struct blkcipher_desc *desc,
223 struct blkcipher_walk walk; 310 struct blkcipher_walk walk;
224 int err, ret; 311 int err, ret;
225 312
313 if (unlikely(op->keylen != AES_KEYSIZE_128))
314 return fallback_blk_dec(desc, dst, src, nbytes);
315
226 blkcipher_walk_init(&walk, dst, src, nbytes); 316 blkcipher_walk_init(&walk, dst, src, nbytes);
227 err = blkcipher_walk_virt(desc, &walk); 317 err = blkcipher_walk_virt(desc, &walk);
318 op->iv = walk.iv;
228 319
229 while((nbytes = walk.nbytes)) { 320 while((nbytes = walk.nbytes)) {
230 op->src = walk.src.virt.addr, 321 op->src = walk.src.virt.addr,
@@ -233,13 +324,9 @@ geode_cbc_decrypt(struct blkcipher_desc *desc,
233 op->len = nbytes - (nbytes % AES_MIN_BLOCK_SIZE); 324 op->len = nbytes - (nbytes % AES_MIN_BLOCK_SIZE);
234 op->dir = AES_DIR_DECRYPT; 325 op->dir = AES_DIR_DECRYPT;
235 326
236 memcpy(op->iv, walk.iv, AES_IV_LENGTH);
237
238 ret = geode_aes_crypt(op); 327 ret = geode_aes_crypt(op);
239 328
240 memcpy(walk.iv, op->iv, AES_IV_LENGTH);
241 nbytes -= ret; 329 nbytes -= ret;
242
243 err = blkcipher_walk_done(desc, &walk, nbytes); 330 err = blkcipher_walk_done(desc, &walk, nbytes);
244 } 331 }
245 332
@@ -255,8 +342,12 @@ geode_cbc_encrypt(struct blkcipher_desc *desc,
255 struct blkcipher_walk walk; 342 struct blkcipher_walk walk;
256 int err, ret; 343 int err, ret;
257 344
345 if (unlikely(op->keylen != AES_KEYSIZE_128))
346 return fallback_blk_enc(desc, dst, src, nbytes);
347
258 blkcipher_walk_init(&walk, dst, src, nbytes); 348 blkcipher_walk_init(&walk, dst, src, nbytes);
259 err = blkcipher_walk_virt(desc, &walk); 349 err = blkcipher_walk_virt(desc, &walk);
350 op->iv = walk.iv;
260 351
261 while((nbytes = walk.nbytes)) { 352 while((nbytes = walk.nbytes)) {
262 op->src = walk.src.virt.addr, 353 op->src = walk.src.virt.addr,
@@ -265,8 +356,6 @@ geode_cbc_encrypt(struct blkcipher_desc *desc,
265 op->len = nbytes - (nbytes % AES_MIN_BLOCK_SIZE); 356 op->len = nbytes - (nbytes % AES_MIN_BLOCK_SIZE);
266 op->dir = AES_DIR_ENCRYPT; 357 op->dir = AES_DIR_ENCRYPT;
267 358
268 memcpy(op->iv, walk.iv, AES_IV_LENGTH);
269
270 ret = geode_aes_crypt(op); 359 ret = geode_aes_crypt(op);
271 nbytes -= ret; 360 nbytes -= ret;
272 err = blkcipher_walk_done(desc, &walk, nbytes); 361 err = blkcipher_walk_done(desc, &walk, nbytes);
@@ -275,22 +364,49 @@ geode_cbc_encrypt(struct blkcipher_desc *desc,
275 return err; 364 return err;
276} 365}
277 366
367static int fallback_init_blk(struct crypto_tfm *tfm)
368{
369 const char *name = tfm->__crt_alg->cra_name;
370 struct geode_aes_op *op = crypto_tfm_ctx(tfm);
371
372 op->fallback.blk = crypto_alloc_blkcipher(name, 0,
373 CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
374
375 if (IS_ERR(op->fallback.blk)) {
376 printk(KERN_ERR "Error allocating fallback algo %s\n", name);
377 return PTR_ERR(op->fallback.blk);
378 }
379
380 return 0;
381}
382
383static void fallback_exit_blk(struct crypto_tfm *tfm)
384{
385 struct geode_aes_op *op = crypto_tfm_ctx(tfm);
386
387 crypto_free_blkcipher(op->fallback.blk);
388 op->fallback.blk = NULL;
389}
390
278static struct crypto_alg geode_cbc_alg = { 391static struct crypto_alg geode_cbc_alg = {
279 .cra_name = "cbc(aes)", 392 .cra_name = "cbc(aes)",
280 .cra_driver_name = "cbc-aes-geode-128", 393 .cra_driver_name = "cbc-aes-geode",
281 .cra_priority = 400, 394 .cra_priority = 400,
282 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, 395 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
396 CRYPTO_ALG_NEED_FALLBACK,
397 .cra_init = fallback_init_blk,
398 .cra_exit = fallback_exit_blk,
283 .cra_blocksize = AES_MIN_BLOCK_SIZE, 399 .cra_blocksize = AES_MIN_BLOCK_SIZE,
284 .cra_ctxsize = sizeof(struct geode_aes_op), 400 .cra_ctxsize = sizeof(struct geode_aes_op),
285 .cra_alignmask = 15, 401 .cra_alignmask = 15,
286 .cra_type = &crypto_blkcipher_type, 402 .cra_type = &crypto_blkcipher_type,
287 .cra_module = THIS_MODULE, 403 .cra_module = THIS_MODULE,
288 .cra_list = LIST_HEAD_INIT(geode_cbc_alg.cra_list), 404 .cra_list = LIST_HEAD_INIT(geode_cbc_alg.cra_list),
289 .cra_u = { 405 .cra_u = {
290 .blkcipher = { 406 .blkcipher = {
291 .min_keysize = AES_KEY_LENGTH, 407 .min_keysize = AES_MIN_KEY_SIZE,
292 .max_keysize = AES_KEY_LENGTH, 408 .max_keysize = AES_MAX_KEY_SIZE,
293 .setkey = geode_setkey, 409 .setkey = geode_setkey_blk,
294 .encrypt = geode_cbc_encrypt, 410 .encrypt = geode_cbc_encrypt,
295 .decrypt = geode_cbc_decrypt, 411 .decrypt = geode_cbc_decrypt,
296 .ivsize = AES_IV_LENGTH, 412 .ivsize = AES_IV_LENGTH,
@@ -307,6 +423,9 @@ geode_ecb_decrypt(struct blkcipher_desc *desc,
307 struct blkcipher_walk walk; 423 struct blkcipher_walk walk;
308 int err, ret; 424 int err, ret;
309 425
426 if (unlikely(op->keylen != AES_KEYSIZE_128))
427 return fallback_blk_dec(desc, dst, src, nbytes);
428
310 blkcipher_walk_init(&walk, dst, src, nbytes); 429 blkcipher_walk_init(&walk, dst, src, nbytes);
311 err = blkcipher_walk_virt(desc, &walk); 430 err = blkcipher_walk_virt(desc, &walk);
312 431
@@ -334,6 +453,9 @@ geode_ecb_encrypt(struct blkcipher_desc *desc,
334 struct blkcipher_walk walk; 453 struct blkcipher_walk walk;
335 int err, ret; 454 int err, ret;
336 455
456 if (unlikely(op->keylen != AES_KEYSIZE_128))
457 return fallback_blk_enc(desc, dst, src, nbytes);
458
337 blkcipher_walk_init(&walk, dst, src, nbytes); 459 blkcipher_walk_init(&walk, dst, src, nbytes);
338 err = blkcipher_walk_virt(desc, &walk); 460 err = blkcipher_walk_virt(desc, &walk);
339 461
@@ -353,28 +475,31 @@ geode_ecb_encrypt(struct blkcipher_desc *desc,
353} 475}
354 476
355static struct crypto_alg geode_ecb_alg = { 477static struct crypto_alg geode_ecb_alg = {
356 .cra_name = "ecb(aes)", 478 .cra_name = "ecb(aes)",
357 .cra_driver_name = "ecb-aes-geode-128", 479 .cra_driver_name = "ecb-aes-geode",
358 .cra_priority = 400, 480 .cra_priority = 400,
359 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, 481 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
482 CRYPTO_ALG_NEED_FALLBACK,
483 .cra_init = fallback_init_blk,
484 .cra_exit = fallback_exit_blk,
360 .cra_blocksize = AES_MIN_BLOCK_SIZE, 485 .cra_blocksize = AES_MIN_BLOCK_SIZE,
361 .cra_ctxsize = sizeof(struct geode_aes_op), 486 .cra_ctxsize = sizeof(struct geode_aes_op),
362 .cra_alignmask = 15, 487 .cra_alignmask = 15,
363 .cra_type = &crypto_blkcipher_type, 488 .cra_type = &crypto_blkcipher_type,
364 .cra_module = THIS_MODULE, 489 .cra_module = THIS_MODULE,
365 .cra_list = LIST_HEAD_INIT(geode_ecb_alg.cra_list), 490 .cra_list = LIST_HEAD_INIT(geode_ecb_alg.cra_list),
366 .cra_u = { 491 .cra_u = {
367 .blkcipher = { 492 .blkcipher = {
368 .min_keysize = AES_KEY_LENGTH, 493 .min_keysize = AES_MIN_KEY_SIZE,
369 .max_keysize = AES_KEY_LENGTH, 494 .max_keysize = AES_MAX_KEY_SIZE,
370 .setkey = geode_setkey, 495 .setkey = geode_setkey_blk,
371 .encrypt = geode_ecb_encrypt, 496 .encrypt = geode_ecb_encrypt,
372 .decrypt = geode_ecb_decrypt, 497 .decrypt = geode_ecb_decrypt,
373 } 498 }
374 } 499 }
375}; 500};
376 501
377static void 502static void __devexit
378geode_aes_remove(struct pci_dev *dev) 503geode_aes_remove(struct pci_dev *dev)
379{ 504{
380 crypto_unregister_alg(&geode_alg); 505 crypto_unregister_alg(&geode_alg);
@@ -389,7 +514,7 @@ geode_aes_remove(struct pci_dev *dev)
389} 514}
390 515
391 516
392static int 517static int __devinit
393geode_aes_probe(struct pci_dev *dev, const struct pci_device_id *id) 518geode_aes_probe(struct pci_dev *dev, const struct pci_device_id *id)
394{ 519{
395 int ret; 520 int ret;
@@ -397,7 +522,7 @@ geode_aes_probe(struct pci_dev *dev, const struct pci_device_id *id)
397 if ((ret = pci_enable_device(dev))) 522 if ((ret = pci_enable_device(dev)))
398 return ret; 523 return ret;
399 524
400 if ((ret = pci_request_regions(dev, "geode-aes-128"))) 525 if ((ret = pci_request_regions(dev, "geode-aes")))
401 goto eenable; 526 goto eenable;
402 527
403 _iobase = pci_iomap(dev, 0, 0); 528 _iobase = pci_iomap(dev, 0, 0);
@@ -472,7 +597,6 @@ geode_aes_exit(void)
472MODULE_AUTHOR("Advanced Micro Devices, Inc."); 597MODULE_AUTHOR("Advanced Micro Devices, Inc.");
473MODULE_DESCRIPTION("Geode LX Hardware AES driver"); 598MODULE_DESCRIPTION("Geode LX Hardware AES driver");
474MODULE_LICENSE("GPL"); 599MODULE_LICENSE("GPL");
475MODULE_ALIAS("aes");
476 600
477module_init(geode_aes_init); 601module_init(geode_aes_init);
478module_exit(geode_aes_exit); 602module_exit(geode_aes_exit);
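The user-visible effect of the geode-aes.c changes above is that "cbc(aes)" and "ecb(aes)" on a Geode LX now accept 192- and 256-bit keys: any key other than AES_KEYSIZE_128 is routed to the software fallback allocated in fallback_init_blk()/fallback_init_cip(). A minimal sketch of exercising that path through the 2.6.24-era synchronous blkcipher API follows; the function name and the key/IV/buffer contents are illustrative only, not part of the patch.

#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <linux/err.h>

/* Illustrative only: with this patch a 256-bit CBC request on a Geode LX is
 * served by the driver's fallback transform instead of being rejected. */
static int try_cbc_aes_256(void)
{
	u8 key[32] = { 0 };	/* AES_KEYSIZE_256: not handled by the AES engine */
	u8 iv[16] = { 0 };
	u8 buf[64] = { 0 };
	struct scatterlist sg;
	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	int err;

	tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_blkcipher_setkey(tfm, key, sizeof(key));
	if (err)
		goto out;

	crypto_blkcipher_set_iv(tfm, iv, sizeof(iv));
	sg_init_one(&sg, buf, sizeof(buf));

	desc.tfm = tfm;
	desc.flags = 0;
	err = crypto_blkcipher_encrypt(&desc, &sg, &sg, sizeof(buf));
out:
	crypto_free_blkcipher(tfm);
	return err;
}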
diff --git a/drivers/crypto/geode-aes.h b/drivers/crypto/geode-aes.h
index f47968671ae7..f1855b50da48 100644
--- a/drivers/crypto/geode-aes.h
+++ b/drivers/crypto/geode-aes.h
@@ -9,9 +9,9 @@
9#ifndef _GEODE_AES_H_ 9#ifndef _GEODE_AES_H_
10#define _GEODE_AES_H_ 10#define _GEODE_AES_H_
11 11
12#define AES_KEY_LENGTH 16 12/* driver logic flags */
13#define AES_IV_LENGTH 16 13#define AES_IV_LENGTH 16
14 14#define AES_KEY_LENGTH 16
15#define AES_MIN_BLOCK_SIZE 16 15#define AES_MIN_BLOCK_SIZE 16
16 16
17#define AES_MODE_ECB 0 17#define AES_MODE_ECB 0
@@ -22,6 +22,38 @@
22 22
23#define AES_FLAGS_HIDDENKEY (1 << 0) 23#define AES_FLAGS_HIDDENKEY (1 << 0)
24 24
25/* Register definitions */
26
27#define AES_CTRLA_REG 0x0000
28
29#define AES_CTRL_START 0x01
30#define AES_CTRL_DECRYPT 0x00
31#define AES_CTRL_ENCRYPT 0x02
32#define AES_CTRL_WRKEY 0x04
33#define AES_CTRL_DCA 0x08
34#define AES_CTRL_SCA 0x10
35#define AES_CTRL_CBC 0x20
36
37#define AES_INTR_REG 0x0008
38
39#define AES_INTRA_PENDING (1 << 16)
40#define AES_INTRB_PENDING (1 << 17)
41
42#define AES_INTR_PENDING (AES_INTRA_PENDING | AES_INTRB_PENDING)
43#define AES_INTR_MASK 0x07
44
45#define AES_SOURCEA_REG 0x0010
46#define AES_DSTA_REG 0x0014
47#define AES_LENA_REG 0x0018
48#define AES_WRITEKEY0_REG 0x0030
49#define AES_WRITEIV0_REG 0x0040
50
51/* A very large counter that is used to gracefully bail out of an
52 * operation in case of trouble
53 */
54
55#define AES_OP_TIMEOUT 0x50000
56
25struct geode_aes_op { 57struct geode_aes_op {
26 58
27 void *src; 59 void *src;
@@ -33,7 +65,13 @@ struct geode_aes_op {
33 int len; 65 int len;
34 66
35 u8 key[AES_KEY_LENGTH]; 67 u8 key[AES_KEY_LENGTH];
36 u8 iv[AES_IV_LENGTH]; 68 u8 *iv;
69
70 union {
71 struct crypto_blkcipher *blk;
72 struct crypto_cipher *cip;
73 } fallback;
74 u32 keylen;
37}; 75};
38 76
39#endif 77#endif
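The AES_OP_TIMEOUT constant added above bounds the driver's busy-wait on the engine so a wedged operation fails instead of spinning forever; do_crypt() now propagates that failure and its caller BUG()s on it, as seen earlier in this patch. A sketch of such a bounded poll is shown below, assuming completion is signalled through AES_INTRA_PENDING in AES_INTR_REG; this hypothetical helper is not the driver's actual do_crypt().

static int wait_for_aes_completion(void __iomem *iobase)
{
	u32 counter = AES_OP_TIMEOUT;
	u32 status;

	do {
		status = ioread32(iobase + AES_INTR_REG);
		cpu_relax();
	} while (!(status & AES_INTRA_PENDING) && --counter);

	return counter ? 0 : -ETIMEDOUT;
}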
diff --git a/drivers/crypto/hifn_795x.c b/drivers/crypto/hifn_795x.c
new file mode 100644
index 000000000000..16413e57597c
--- /dev/null
+++ b/drivers/crypto/hifn_795x.c
@@ -0,0 +1,2838 @@
1/*
2 * 2007+ Copyright (c) Evgeniy Polyakov <johnpol@2ka.mipt.ru>
3 * All rights reserved.
4 *
5 * This program is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 *
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
14 *
15 * You should have received a copy of the GNU General Public License
16 * along with this program; if not, write to the Free Software
17 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
18 */
19
20#include <linux/kernel.h>
21#include <linux/module.h>
22#include <linux/moduleparam.h>
23#include <linux/mod_devicetable.h>
24#include <linux/interrupt.h>
25#include <linux/pci.h>
26#include <linux/slab.h>
27#include <linux/delay.h>
28#include <linux/mm.h>
29#include <linux/dma-mapping.h>
30#include <linux/scatterlist.h>
31#include <linux/highmem.h>
32#include <linux/interrupt.h>
33#include <linux/crypto.h>
34#include <linux/hw_random.h>
35#include <linux/ktime.h>
36
37#include <crypto/algapi.h>
38#include <crypto/des.h>
39
40#include <asm/kmap_types.h>
41
42#undef dprintk
43
44#define HIFN_TEST
45//#define HIFN_DEBUG
46
47#ifdef HIFN_DEBUG
48#define dprintk(f, a...) printk(f, ##a)
49#else
50#define dprintk(f, a...) do {} while (0)
51#endif
52
53static char hifn_pll_ref[sizeof("extNNN")] = "ext";
54module_param_string(hifn_pll_ref, hifn_pll_ref, sizeof(hifn_pll_ref), 0444);
55MODULE_PARM_DESC(hifn_pll_ref,
56 "PLL reference clock (pci[freq] or ext[freq], default ext)");
57
58static atomic_t hifn_dev_number;
59
60#define ACRYPTO_OP_DECRYPT 0
61#define ACRYPTO_OP_ENCRYPT 1
62#define ACRYPTO_OP_HMAC 2
63#define ACRYPTO_OP_RNG 3
64
65#define ACRYPTO_MODE_ECB 0
66#define ACRYPTO_MODE_CBC 1
67#define ACRYPTO_MODE_CFB 2
68#define ACRYPTO_MODE_OFB 3
69
70#define ACRYPTO_TYPE_AES_128 0
71#define ACRYPTO_TYPE_AES_192 1
72#define ACRYPTO_TYPE_AES_256 2
73#define ACRYPTO_TYPE_3DES 3
74#define ACRYPTO_TYPE_DES 4
75
76#define PCI_VENDOR_ID_HIFN 0x13A3
77#define PCI_DEVICE_ID_HIFN_7955 0x0020
78#define PCI_DEVICE_ID_HIFN_7956 0x001d
79
80/* I/O region sizes */
81
82#define HIFN_BAR0_SIZE 0x1000
83#define HIFN_BAR1_SIZE 0x2000
84#define HIFN_BAR2_SIZE 0x8000
85
 86/* DMA registers */
87
88#define HIFN_DMA_CRA 0x0C /* DMA Command Ring Address */
89#define HIFN_DMA_SDRA 0x1C /* DMA Source Data Ring Address */
90#define HIFN_DMA_RRA 0x2C /* DMA Result Ring Address */
91#define HIFN_DMA_DDRA 0x3C /* DMA Destination Data Ring Address */
92#define HIFN_DMA_STCTL 0x40 /* DMA Status and Control */
93#define HIFN_DMA_INTREN 0x44 /* DMA Interrupt Enable */
94#define HIFN_DMA_CFG1 0x48 /* DMA Configuration #1 */
95#define HIFN_DMA_CFG2 0x6C /* DMA Configuration #2 */
96#define HIFN_CHIP_ID 0x98 /* Chip ID */
97
98/*
99 * Processing Unit Registers (offset from BASEREG0)
100 */
101#define HIFN_0_PUDATA 0x00 /* Processing Unit Data */
102#define HIFN_0_PUCTRL 0x04 /* Processing Unit Control */
103#define HIFN_0_PUISR 0x08 /* Processing Unit Interrupt Status */
104#define HIFN_0_PUCNFG 0x0c /* Processing Unit Configuration */
105#define HIFN_0_PUIER 0x10 /* Processing Unit Interrupt Enable */
106#define HIFN_0_PUSTAT 0x14 /* Processing Unit Status/Chip ID */
107#define HIFN_0_FIFOSTAT 0x18 /* FIFO Status */
108#define HIFN_0_FIFOCNFG 0x1c /* FIFO Configuration */
109#define HIFN_0_SPACESIZE 0x20 /* Register space size */
110
111/* Processing Unit Control Register (HIFN_0_PUCTRL) */
112#define HIFN_PUCTRL_CLRSRCFIFO 0x0010 /* clear source fifo */
113#define HIFN_PUCTRL_STOP 0x0008 /* stop pu */
114#define HIFN_PUCTRL_LOCKRAM 0x0004 /* lock ram */
115#define HIFN_PUCTRL_DMAENA 0x0002 /* enable dma */
116#define HIFN_PUCTRL_RESET 0x0001 /* Reset processing unit */
117
118/* Processing Unit Interrupt Status Register (HIFN_0_PUISR) */
119#define HIFN_PUISR_CMDINVAL 0x8000 /* Invalid command interrupt */
120#define HIFN_PUISR_DATAERR 0x4000 /* Data error interrupt */
121#define HIFN_PUISR_SRCFIFO 0x2000 /* Source FIFO ready interrupt */
122#define HIFN_PUISR_DSTFIFO 0x1000 /* Destination FIFO ready interrupt */
123#define HIFN_PUISR_DSTOVER 0x0200 /* Destination overrun interrupt */
124#define HIFN_PUISR_SRCCMD 0x0080 /* Source command interrupt */
125#define HIFN_PUISR_SRCCTX 0x0040 /* Source context interrupt */
126#define HIFN_PUISR_SRCDATA 0x0020 /* Source data interrupt */
127#define HIFN_PUISR_DSTDATA 0x0010 /* Destination data interrupt */
128#define HIFN_PUISR_DSTRESULT 0x0004 /* Destination result interrupt */
129
130/* Processing Unit Configuration Register (HIFN_0_PUCNFG) */
131#define HIFN_PUCNFG_DRAMMASK 0xe000 /* DRAM size mask */
132#define HIFN_PUCNFG_DSZ_256K 0x0000 /* 256k dram */
133#define HIFN_PUCNFG_DSZ_512K 0x2000 /* 512k dram */
134#define HIFN_PUCNFG_DSZ_1M 0x4000 /* 1m dram */
135#define HIFN_PUCNFG_DSZ_2M 0x6000 /* 2m dram */
136#define HIFN_PUCNFG_DSZ_4M 0x8000 /* 4m dram */
137#define HIFN_PUCNFG_DSZ_8M 0xa000 /* 8m dram */
138#define HIFN_PUNCFG_DSZ_16M 0xc000 /* 16m dram */
139#define HIFN_PUCNFG_DSZ_32M 0xe000 /* 32m dram */
140#define HIFN_PUCNFG_DRAMREFRESH 0x1800 /* DRAM refresh rate mask */
141#define HIFN_PUCNFG_DRFR_512 0x0000 /* 512 divisor of ECLK */
142#define HIFN_PUCNFG_DRFR_256 0x0800 /* 256 divisor of ECLK */
143#define HIFN_PUCNFG_DRFR_128 0x1000 /* 128 divisor of ECLK */
144#define HIFN_PUCNFG_TCALLPHASES 0x0200 /* your guess is as good as mine... */
145#define HIFN_PUCNFG_TCDRVTOTEM 0x0100 /* your guess is as good as mine... */
146#define HIFN_PUCNFG_BIGENDIAN 0x0080 /* DMA big endian mode */
147#define HIFN_PUCNFG_BUS32 0x0040 /* Bus width 32bits */
148#define HIFN_PUCNFG_BUS16 0x0000 /* Bus width 16 bits */
149#define HIFN_PUCNFG_CHIPID 0x0020 /* Allow chipid from PUSTAT */
150#define HIFN_PUCNFG_DRAM 0x0010 /* Context RAM is DRAM */
151#define HIFN_PUCNFG_SRAM 0x0000 /* Context RAM is SRAM */
152#define HIFN_PUCNFG_COMPSING 0x0004 /* Enable single compression context */
153#define HIFN_PUCNFG_ENCCNFG 0x0002 /* Encryption configuration */
154
155/* Processing Unit Interrupt Enable Register (HIFN_0_PUIER) */
156#define HIFN_PUIER_CMDINVAL 0x8000 /* Invalid command interrupt */
157#define HIFN_PUIER_DATAERR 0x4000 /* Data error interrupt */
158#define HIFN_PUIER_SRCFIFO 0x2000 /* Source FIFO ready interrupt */
159#define HIFN_PUIER_DSTFIFO 0x1000 /* Destination FIFO ready interrupt */
160#define HIFN_PUIER_DSTOVER 0x0200 /* Destination overrun interrupt */
161#define HIFN_PUIER_SRCCMD 0x0080 /* Source command interrupt */
162#define HIFN_PUIER_SRCCTX 0x0040 /* Source context interrupt */
163#define HIFN_PUIER_SRCDATA 0x0020 /* Source data interrupt */
164#define HIFN_PUIER_DSTDATA 0x0010 /* Destination data interrupt */
165#define HIFN_PUIER_DSTRESULT 0x0004 /* Destination result interrupt */
166
167/* Processing Unit Status Register/Chip ID (HIFN_0_PUSTAT) */
168#define HIFN_PUSTAT_CMDINVAL 0x8000 /* Invalid command interrupt */
169#define HIFN_PUSTAT_DATAERR 0x4000 /* Data error interrupt */
170#define HIFN_PUSTAT_SRCFIFO 0x2000 /* Source FIFO ready interrupt */
171#define HIFN_PUSTAT_DSTFIFO 0x1000 /* Destination FIFO ready interrupt */
172#define HIFN_PUSTAT_DSTOVER 0x0200 /* Destination overrun interrupt */
173#define HIFN_PUSTAT_SRCCMD 0x0080 /* Source command interrupt */
174#define HIFN_PUSTAT_SRCCTX 0x0040 /* Source context interrupt */
175#define HIFN_PUSTAT_SRCDATA 0x0020 /* Source data interrupt */
176#define HIFN_PUSTAT_DSTDATA 0x0010 /* Destination data interrupt */
177#define HIFN_PUSTAT_DSTRESULT 0x0004 /* Destination result interrupt */
178#define HIFN_PUSTAT_CHIPREV 0x00ff /* Chip revision mask */
179#define HIFN_PUSTAT_CHIPENA 0xff00 /* Chip enabled mask */
180#define HIFN_PUSTAT_ENA_2 0x1100 /* Level 2 enabled */
181#define HIFN_PUSTAT_ENA_1 0x1000 /* Level 1 enabled */
182#define HIFN_PUSTAT_ENA_0 0x3000 /* Level 0 enabled */
183#define HIFN_PUSTAT_REV_2 0x0020 /* 7751 PT6/2 */
184#define HIFN_PUSTAT_REV_3 0x0030 /* 7751 PT6/3 */
185
186/* FIFO Status Register (HIFN_0_FIFOSTAT) */
187#define HIFN_FIFOSTAT_SRC 0x7f00 /* Source FIFO available */
188#define HIFN_FIFOSTAT_DST 0x007f /* Destination FIFO available */
189
190/* FIFO Configuration Register (HIFN_0_FIFOCNFG) */
191#define HIFN_FIFOCNFG_THRESHOLD 0x0400 /* must be written as 1 */
192
193/*
194 * DMA Interface Registers (offset from BASEREG1)
195 */
196#define HIFN_1_DMA_CRAR 0x0c /* DMA Command Ring Address */
197#define HIFN_1_DMA_SRAR 0x1c /* DMA Source Ring Address */
198#define HIFN_1_DMA_RRAR 0x2c /* DMA Result Ring Address */
199#define HIFN_1_DMA_DRAR 0x3c /* DMA Destination Ring Address */
200#define HIFN_1_DMA_CSR 0x40 /* DMA Status and Control */
201#define HIFN_1_DMA_IER 0x44 /* DMA Interrupt Enable */
202#define HIFN_1_DMA_CNFG 0x48 /* DMA Configuration */
203#define HIFN_1_PLL 0x4c /* 795x: PLL config */
204#define HIFN_1_7811_RNGENA 0x60 /* 7811: rng enable */
205#define HIFN_1_7811_RNGCFG 0x64 /* 7811: rng config */
206#define HIFN_1_7811_RNGDAT 0x68 /* 7811: rng data */
207#define HIFN_1_7811_RNGSTS 0x6c /* 7811: rng status */
208#define HIFN_1_7811_MIPSRST 0x94 /* 7811: MIPS reset */
209#define HIFN_1_REVID 0x98 /* Revision ID */
210#define HIFN_1_UNLOCK_SECRET1 0xf4
211#define HIFN_1_UNLOCK_SECRET2 0xfc
212#define HIFN_1_PUB_RESET 0x204 /* Public/RNG Reset */
213#define HIFN_1_PUB_BASE 0x300 /* Public Base Address */
214#define HIFN_1_PUB_OPLEN 0x304 /* Public Operand Length */
215#define HIFN_1_PUB_OP 0x308 /* Public Operand */
216#define HIFN_1_PUB_STATUS 0x30c /* Public Status */
217#define HIFN_1_PUB_IEN 0x310 /* Public Interrupt enable */
218#define HIFN_1_RNG_CONFIG 0x314 /* RNG config */
219#define HIFN_1_RNG_DATA 0x318 /* RNG data */
220#define HIFN_1_PUB_MEM 0x400 /* start of Public key memory */
221#define HIFN_1_PUB_MEMEND 0xbff /* end of Public key memory */
222
223/* DMA Status and Control Register (HIFN_1_DMA_CSR) */
224#define HIFN_DMACSR_D_CTRLMASK 0xc0000000 /* Destination Ring Control */
225#define HIFN_DMACSR_D_CTRL_NOP 0x00000000 /* Dest. Control: no-op */
226#define HIFN_DMACSR_D_CTRL_DIS 0x40000000 /* Dest. Control: disable */
227#define HIFN_DMACSR_D_CTRL_ENA 0x80000000 /* Dest. Control: enable */
228#define HIFN_DMACSR_D_ABORT 0x20000000 /* Destination Ring PCIAbort */
229#define HIFN_DMACSR_D_DONE 0x10000000 /* Destination Ring Done */
230#define HIFN_DMACSR_D_LAST 0x08000000 /* Destination Ring Last */
231#define HIFN_DMACSR_D_WAIT 0x04000000 /* Destination Ring Waiting */
232#define HIFN_DMACSR_D_OVER 0x02000000 /* Destination Ring Overflow */
233#define HIFN_DMACSR_R_CTRL 0x00c00000 /* Result Ring Control */
234#define HIFN_DMACSR_R_CTRL_NOP 0x00000000 /* Result Control: no-op */
235#define HIFN_DMACSR_R_CTRL_DIS 0x00400000 /* Result Control: disable */
236#define HIFN_DMACSR_R_CTRL_ENA 0x00800000 /* Result Control: enable */
237#define HIFN_DMACSR_R_ABORT 0x00200000 /* Result Ring PCI Abort */
238#define HIFN_DMACSR_R_DONE 0x00100000 /* Result Ring Done */
239#define HIFN_DMACSR_R_LAST 0x00080000 /* Result Ring Last */
240#define HIFN_DMACSR_R_WAIT 0x00040000 /* Result Ring Waiting */
241#define HIFN_DMACSR_R_OVER 0x00020000 /* Result Ring Overflow */
242#define HIFN_DMACSR_S_CTRL 0x0000c000 /* Source Ring Control */
243#define HIFN_DMACSR_S_CTRL_NOP 0x00000000 /* Source Control: no-op */
244#define HIFN_DMACSR_S_CTRL_DIS 0x00004000 /* Source Control: disable */
245#define HIFN_DMACSR_S_CTRL_ENA 0x00008000 /* Source Control: enable */
246#define HIFN_DMACSR_S_ABORT 0x00002000 /* Source Ring PCI Abort */
247#define HIFN_DMACSR_S_DONE 0x00001000 /* Source Ring Done */
248#define HIFN_DMACSR_S_LAST 0x00000800 /* Source Ring Last */
249#define HIFN_DMACSR_S_WAIT 0x00000400 /* Source Ring Waiting */
250#define HIFN_DMACSR_ILLW 0x00000200 /* Illegal write (7811 only) */
251#define HIFN_DMACSR_ILLR 0x00000100 /* Illegal read (7811 only) */
252#define HIFN_DMACSR_C_CTRL 0x000000c0 /* Command Ring Control */
253#define HIFN_DMACSR_C_CTRL_NOP 0x00000000 /* Command Control: no-op */
254#define HIFN_DMACSR_C_CTRL_DIS 0x00000040 /* Command Control: disable */
255#define HIFN_DMACSR_C_CTRL_ENA 0x00000080 /* Command Control: enable */
256#define HIFN_DMACSR_C_ABORT 0x00000020 /* Command Ring PCI Abort */
257#define HIFN_DMACSR_C_DONE 0x00000010 /* Command Ring Done */
258#define HIFN_DMACSR_C_LAST 0x00000008 /* Command Ring Last */
259#define HIFN_DMACSR_C_WAIT 0x00000004 /* Command Ring Waiting */
260#define HIFN_DMACSR_PUBDONE 0x00000002 /* Public op done (7951 only) */
261#define HIFN_DMACSR_ENGINE 0x00000001 /* Command Ring Engine IRQ */
262
263/* DMA Interrupt Enable Register (HIFN_1_DMA_IER) */
264#define HIFN_DMAIER_D_ABORT 0x20000000 /* Destination Ring PCIAbort */
265#define HIFN_DMAIER_D_DONE 0x10000000 /* Destination Ring Done */
266#define HIFN_DMAIER_D_LAST 0x08000000 /* Destination Ring Last */
267#define HIFN_DMAIER_D_WAIT 0x04000000 /* Destination Ring Waiting */
268#define HIFN_DMAIER_D_OVER 0x02000000 /* Destination Ring Overflow */
269#define HIFN_DMAIER_R_ABORT 0x00200000 /* Result Ring PCI Abort */
270#define HIFN_DMAIER_R_DONE 0x00100000 /* Result Ring Done */
271#define HIFN_DMAIER_R_LAST 0x00080000 /* Result Ring Last */
272#define HIFN_DMAIER_R_WAIT 0x00040000 /* Result Ring Waiting */
273#define HIFN_DMAIER_R_OVER 0x00020000 /* Result Ring Overflow */
274#define HIFN_DMAIER_S_ABORT 0x00002000 /* Source Ring PCI Abort */
275#define HIFN_DMAIER_S_DONE 0x00001000 /* Source Ring Done */
276#define HIFN_DMAIER_S_LAST 0x00000800 /* Source Ring Last */
277#define HIFN_DMAIER_S_WAIT 0x00000400 /* Source Ring Waiting */
278#define HIFN_DMAIER_ILLW 0x00000200 /* Illegal write (7811 only) */
279#define HIFN_DMAIER_ILLR 0x00000100 /* Illegal read (7811 only) */
280#define HIFN_DMAIER_C_ABORT 0x00000020 /* Command Ring PCI Abort */
281#define HIFN_DMAIER_C_DONE 0x00000010 /* Command Ring Done */
282#define HIFN_DMAIER_C_LAST 0x00000008 /* Command Ring Last */
283#define HIFN_DMAIER_C_WAIT 0x00000004 /* Command Ring Waiting */
284#define HIFN_DMAIER_PUBDONE 0x00000002 /* public op done (7951 only) */
285#define HIFN_DMAIER_ENGINE 0x00000001 /* Engine IRQ */
286
287/* DMA Configuration Register (HIFN_1_DMA_CNFG) */
288#define HIFN_DMACNFG_BIGENDIAN 0x10000000 /* big endian mode */
289#define HIFN_DMACNFG_POLLFREQ 0x00ff0000 /* Poll frequency mask */
290#define HIFN_DMACNFG_UNLOCK 0x00000800
291#define HIFN_DMACNFG_POLLINVAL 0x00000700 /* Invalid Poll Scalar */
292#define HIFN_DMACNFG_LAST 0x00000010 /* Host control LAST bit */
293#define HIFN_DMACNFG_MODE 0x00000004 /* DMA mode */
294#define HIFN_DMACNFG_DMARESET 0x00000002 /* DMA Reset # */
295#define HIFN_DMACNFG_MSTRESET 0x00000001 /* Master Reset # */
296
297/* PLL configuration register */
298#define HIFN_PLL_REF_CLK_HBI 0x00000000 /* HBI reference clock */
299#define HIFN_PLL_REF_CLK_PLL 0x00000001 /* PLL reference clock */
300#define HIFN_PLL_BP 0x00000002 /* Reference clock bypass */
301#define HIFN_PLL_PK_CLK_HBI 0x00000000 /* PK engine HBI clock */
302#define HIFN_PLL_PK_CLK_PLL 0x00000008 /* PK engine PLL clock */
303#define HIFN_PLL_PE_CLK_HBI 0x00000000 /* PE engine HBI clock */
304#define HIFN_PLL_PE_CLK_PLL 0x00000010 /* PE engine PLL clock */
305#define HIFN_PLL_RESERVED_1 0x00000400 /* Reserved bit, must be 1 */
306#define HIFN_PLL_ND_SHIFT 11 /* Clock multiplier shift */
307#define HIFN_PLL_ND_MULT_2 0x00000000 /* PLL clock multiplier 2 */
308#define HIFN_PLL_ND_MULT_4 0x00000800 /* PLL clock multiplier 4 */
309#define HIFN_PLL_ND_MULT_6 0x00001000 /* PLL clock multiplier 6 */
310#define HIFN_PLL_ND_MULT_8 0x00001800 /* PLL clock multiplier 8 */
311#define HIFN_PLL_ND_MULT_10 0x00002000 /* PLL clock multiplier 10 */
312#define HIFN_PLL_ND_MULT_12 0x00002800 /* PLL clock multiplier 12 */
313#define HIFN_PLL_IS_1_8 0x00000000 /* charge pump (mult. 1-8) */
314#define HIFN_PLL_IS_9_12 0x00010000 /* charge pump (mult. 9-12) */
315
316#define HIFN_PLL_FCK_MAX 266 /* Maximum PLL frequency */
317
318/* Public key reset register (HIFN_1_PUB_RESET) */
319#define HIFN_PUBRST_RESET 0x00000001 /* reset public/rng unit */
320
321/* Public base address register (HIFN_1_PUB_BASE) */
322#define HIFN_PUBBASE_ADDR 0x00003fff /* base address */
323
324/* Public operand length register (HIFN_1_PUB_OPLEN) */
325#define HIFN_PUBOPLEN_MOD_M 0x0000007f /* modulus length mask */
326#define HIFN_PUBOPLEN_MOD_S 0 /* modulus length shift */
327#define HIFN_PUBOPLEN_EXP_M 0x0003ff80 /* exponent length mask */
328#define HIFN_PUBOPLEN_EXP_S 7 /* exponent length shift */
329#define HIFN_PUBOPLEN_RED_M 0x003c0000 /* reducend length mask */
330#define HIFN_PUBOPLEN_RED_S 18 /* reducend length shift */
331
332/* Public operation register (HIFN_1_PUB_OP) */
333#define HIFN_PUBOP_AOFFSET_M 0x0000007f /* A offset mask */
334#define HIFN_PUBOP_AOFFSET_S 0 /* A offset shift */
335#define HIFN_PUBOP_BOFFSET_M 0x00000f80 /* B offset mask */
336#define HIFN_PUBOP_BOFFSET_S 7 /* B offset shift */
337#define HIFN_PUBOP_MOFFSET_M 0x0003f000 /* M offset mask */
338#define HIFN_PUBOP_MOFFSET_S 12 /* M offset shift */
339#define HIFN_PUBOP_OP_MASK 0x003c0000 /* Opcode: */
340#define HIFN_PUBOP_OP_NOP 0x00000000 /* NOP */
341#define HIFN_PUBOP_OP_ADD 0x00040000 /* ADD */
342#define HIFN_PUBOP_OP_ADDC 0x00080000 /* ADD w/carry */
343#define HIFN_PUBOP_OP_SUB 0x000c0000 /* SUB */
344#define HIFN_PUBOP_OP_SUBC 0x00100000 /* SUB w/carry */
345#define HIFN_PUBOP_OP_MODADD 0x00140000 /* Modular ADD */
346#define HIFN_PUBOP_OP_MODSUB 0x00180000 /* Modular SUB */
347#define HIFN_PUBOP_OP_INCA 0x001c0000 /* INC A */
348#define HIFN_PUBOP_OP_DECA 0x00200000 /* DEC A */
349#define HIFN_PUBOP_OP_MULT 0x00240000 /* MULT */
350#define HIFN_PUBOP_OP_MODMULT 0x00280000 /* Modular MULT */
351#define HIFN_PUBOP_OP_MODRED 0x002c0000 /* Modular RED */
352#define HIFN_PUBOP_OP_MODEXP 0x00300000 /* Modular EXP */
353
354/* Public status register (HIFN_1_PUB_STATUS) */
355#define HIFN_PUBSTS_DONE 0x00000001 /* operation done */
356#define HIFN_PUBSTS_CARRY 0x00000002 /* carry */
357
358/* Public interrupt enable register (HIFN_1_PUB_IEN) */
359#define HIFN_PUBIEN_DONE 0x00000001 /* operation done interrupt */
360
361/* Random number generator config register (HIFN_1_RNG_CONFIG) */
362#define HIFN_RNGCFG_ENA 0x00000001 /* enable rng */
363
364#define HIFN_NAMESIZE 32
365#define HIFN_MAX_RESULT_ORDER 5
366
367#define HIFN_D_CMD_RSIZE 24*4
368#define HIFN_D_SRC_RSIZE 80*4
369#define HIFN_D_DST_RSIZE 80*4
370#define HIFN_D_RES_RSIZE 24*4
371
372#define HIFN_QUEUE_LENGTH HIFN_D_CMD_RSIZE-5
373
374#define AES_MIN_KEY_SIZE 16
375#define AES_MAX_KEY_SIZE 32
376
377#define HIFN_DES_KEY_LENGTH 8
378#define HIFN_3DES_KEY_LENGTH 24
379#define HIFN_MAX_CRYPT_KEY_LENGTH AES_MAX_KEY_SIZE
380#define HIFN_IV_LENGTH 8
381#define HIFN_AES_IV_LENGTH 16
382#define HIFN_MAX_IV_LENGTH HIFN_AES_IV_LENGTH
383
384#define HIFN_MAC_KEY_LENGTH 64
385#define HIFN_MD5_LENGTH 16
386#define HIFN_SHA1_LENGTH 20
387#define HIFN_MAC_TRUNC_LENGTH 12
388
389#define HIFN_MAX_COMMAND (8 + 8 + 8 + 64 + 260)
390#define HIFN_MAX_RESULT (8 + 4 + 4 + 20 + 4)
391#define HIFN_USED_RESULT 12
392
393struct hifn_desc
394{
395 volatile u32 l;
396 volatile u32 p;
397};
398
399struct hifn_dma {
400 struct hifn_desc cmdr[HIFN_D_CMD_RSIZE+1];
401 struct hifn_desc srcr[HIFN_D_SRC_RSIZE+1];
402 struct hifn_desc dstr[HIFN_D_DST_RSIZE+1];
403 struct hifn_desc resr[HIFN_D_RES_RSIZE+1];
404
405 u8 command_bufs[HIFN_D_CMD_RSIZE][HIFN_MAX_COMMAND];
406 u8 result_bufs[HIFN_D_CMD_RSIZE][HIFN_MAX_RESULT];
407
408 u64 test_src, test_dst;
409
410 /*
411 * Our current positions for insertion and removal from the descriptor
412 * rings.
413 */
414 volatile int cmdi, srci, dsti, resi;
415 volatile int cmdu, srcu, dstu, resu;
416 int cmdk, srck, dstk, resk;
417};
418
419#define HIFN_FLAG_CMD_BUSY (1<<0)
420#define HIFN_FLAG_SRC_BUSY (1<<1)
421#define HIFN_FLAG_DST_BUSY (1<<2)
422#define HIFN_FLAG_RES_BUSY (1<<3)
423#define HIFN_FLAG_OLD_KEY (1<<4)
424
425#define HIFN_DEFAULT_ACTIVE_NUM 5
426
427struct hifn_device
428{
429 char name[HIFN_NAMESIZE];
430
431 int irq;
432
433 struct pci_dev *pdev;
434 void __iomem *bar[3];
435
436 unsigned long result_mem;
437 dma_addr_t dst;
438
439 void *desc_virt;
440 dma_addr_t desc_dma;
441
442 u32 dmareg;
443
444 void *sa[HIFN_D_RES_RSIZE];
445
446 spinlock_t lock;
447
448 void *priv;
449
450 u32 flags;
451 int active, started;
452 struct delayed_work work;
453 unsigned long reset;
454 unsigned long success;
455 unsigned long prev_success;
456
457 u8 snum;
458
459 struct tasklet_struct tasklet;
460
461 struct crypto_queue queue;
462 struct list_head alg_list;
463
464 unsigned int pk_clk_freq;
465
466#if defined(CONFIG_HW_RANDOM) || defined(CONFIG_HW_RANDOM_MODULE)
467 unsigned int rng_wait_time;
468 ktime_t rngtime;
469 struct hwrng rng;
470#endif
471};
472
473#define HIFN_D_LENGTH 0x0000ffff
474#define HIFN_D_NOINVALID 0x01000000
475#define HIFN_D_MASKDONEIRQ 0x02000000
476#define HIFN_D_DESTOVER 0x04000000
477#define HIFN_D_OVER 0x08000000
478#define HIFN_D_LAST 0x20000000
479#define HIFN_D_JUMP 0x40000000
480#define HIFN_D_VALID 0x80000000
481
482struct hifn_base_command
483{
484 volatile u16 masks;
485 volatile u16 session_num;
486 volatile u16 total_source_count;
487 volatile u16 total_dest_count;
488};
489
490#define HIFN_BASE_CMD_COMP 0x0100 /* enable compression engine */
491#define HIFN_BASE_CMD_PAD 0x0200 /* enable padding engine */
492#define HIFN_BASE_CMD_MAC 0x0400 /* enable MAC engine */
493#define HIFN_BASE_CMD_CRYPT 0x0800 /* enable crypt engine */
494#define HIFN_BASE_CMD_DECODE 0x2000
495#define HIFN_BASE_CMD_SRCLEN_M 0xc000
496#define HIFN_BASE_CMD_SRCLEN_S 14
497#define HIFN_BASE_CMD_DSTLEN_M 0x3000
498#define HIFN_BASE_CMD_DSTLEN_S 12
499#define HIFN_BASE_CMD_LENMASK_HI 0x30000
500#define HIFN_BASE_CMD_LENMASK_LO 0x0ffff
501
502/*
503 * Structure to help build up the command data structure.
504 */
505struct hifn_crypt_command
506{
507 volatile u16 masks;
508 volatile u16 header_skip;
509 volatile u16 source_count;
510 volatile u16 reserved;
511};
512
513#define HIFN_CRYPT_CMD_ALG_MASK 0x0003 /* algorithm: */
514#define HIFN_CRYPT_CMD_ALG_DES 0x0000 /* DES */
515#define HIFN_CRYPT_CMD_ALG_3DES 0x0001 /* 3DES */
516#define HIFN_CRYPT_CMD_ALG_RC4 0x0002 /* RC4 */
517#define HIFN_CRYPT_CMD_ALG_AES 0x0003 /* AES */
518#define HIFN_CRYPT_CMD_MODE_MASK 0x0018 /* Encrypt mode: */
519#define HIFN_CRYPT_CMD_MODE_ECB 0x0000 /* ECB */
520#define HIFN_CRYPT_CMD_MODE_CBC 0x0008 /* CBC */
521#define HIFN_CRYPT_CMD_MODE_CFB 0x0010 /* CFB */
522#define HIFN_CRYPT_CMD_MODE_OFB 0x0018 /* OFB */
523#define HIFN_CRYPT_CMD_CLR_CTX 0x0040 /* clear context */
524#define HIFN_CRYPT_CMD_KSZ_MASK 0x0600 /* AES key size: */
525#define HIFN_CRYPT_CMD_KSZ_128 0x0000 /* 128 bit */
526#define HIFN_CRYPT_CMD_KSZ_192 0x0200 /* 192 bit */
527#define HIFN_CRYPT_CMD_KSZ_256 0x0400 /* 256 bit */
528#define HIFN_CRYPT_CMD_NEW_KEY 0x0800 /* expect new key */
529#define HIFN_CRYPT_CMD_NEW_IV 0x1000 /* expect new iv */
530#define HIFN_CRYPT_CMD_SRCLEN_M 0xc000
531#define HIFN_CRYPT_CMD_SRCLEN_S 14
532
533/*
534 * Structure to help build up the command data structure.
535 */
536struct hifn_mac_command
537{
538 volatile u16 masks;
539 volatile u16 header_skip;
540 volatile u16 source_count;
541 volatile u16 reserved;
542};
543
544#define HIFN_MAC_CMD_ALG_MASK 0x0001
545#define HIFN_MAC_CMD_ALG_SHA1 0x0000
546#define HIFN_MAC_CMD_ALG_MD5 0x0001
547#define HIFN_MAC_CMD_MODE_MASK 0x000c
548#define HIFN_MAC_CMD_MODE_HMAC 0x0000
549#define HIFN_MAC_CMD_MODE_SSL_MAC 0x0004
550#define HIFN_MAC_CMD_MODE_HASH 0x0008
551#define HIFN_MAC_CMD_MODE_FULL 0x0004
552#define HIFN_MAC_CMD_TRUNC 0x0010
553#define HIFN_MAC_CMD_RESULT 0x0020
554#define HIFN_MAC_CMD_APPEND 0x0040
555#define HIFN_MAC_CMD_SRCLEN_M 0xc000
556#define HIFN_MAC_CMD_SRCLEN_S 14
557
558/*
559 * MAC POS IPsec initiates authentication after encryption on encodes
560 * and before decryption on decodes.
561 */
562#define HIFN_MAC_CMD_POS_IPSEC 0x0200
563#define HIFN_MAC_CMD_NEW_KEY 0x0800
564
565struct hifn_comp_command
566{
567 volatile u16 masks;
568 volatile u16 header_skip;
569 volatile u16 source_count;
570 volatile u16 reserved;
571};
572
573#define HIFN_COMP_CMD_SRCLEN_M 0xc000
574#define HIFN_COMP_CMD_SRCLEN_S 14
575#define HIFN_COMP_CMD_ONE 0x0100 /* must be one */
576#define HIFN_COMP_CMD_CLEARHIST 0x0010 /* clear history */
577#define HIFN_COMP_CMD_UPDATEHIST 0x0008 /* update history */
578#define HIFN_COMP_CMD_LZS_STRIP0 0x0004 /* LZS: strip zero */
579#define HIFN_COMP_CMD_MPPC_RESTART 0x0004 /* MPPC: restart */
580#define HIFN_COMP_CMD_ALG_MASK 0x0001 /* compression mode: */
581#define HIFN_COMP_CMD_ALG_MPPC 0x0001 /* MPPC */
582#define HIFN_COMP_CMD_ALG_LZS 0x0000 /* LZS */
583
584struct hifn_base_result
585{
586 volatile u16 flags;
587 volatile u16 session;
588 volatile u16 src_cnt; /* 15:0 of source count */
589 volatile u16 dst_cnt; /* 15:0 of dest count */
590};
591
592#define HIFN_BASE_RES_DSTOVERRUN 0x0200 /* destination overrun */
593#define HIFN_BASE_RES_SRCLEN_M 0xc000 /* 17:16 of source count */
594#define HIFN_BASE_RES_SRCLEN_S 14
595#define HIFN_BASE_RES_DSTLEN_M 0x3000 /* 17:16 of dest count */
596#define HIFN_BASE_RES_DSTLEN_S 12
597
598struct hifn_comp_result
599{
600 volatile u16 flags;
601 volatile u16 crc;
602};
603
604#define HIFN_COMP_RES_LCB_M 0xff00 /* longitudinal check byte */
605#define HIFN_COMP_RES_LCB_S 8
606#define HIFN_COMP_RES_RESTART 0x0004 /* MPPC: restart */
607#define HIFN_COMP_RES_ENDMARKER 0x0002 /* LZS: end marker seen */
608#define HIFN_COMP_RES_SRC_NOTZERO 0x0001 /* source expired */
609
610struct hifn_mac_result
611{
612 volatile u16 flags;
613 volatile u16 reserved;
614 /* followed by 0, 6, 8, or 10 u16's of the MAC, then crypt */
615};
616
617#define HIFN_MAC_RES_MISCOMPARE 0x0002 /* compare failed */
618#define HIFN_MAC_RES_SRC_NOTZERO 0x0001 /* source expired */
619
620struct hifn_crypt_result
621{
622 volatile u16 flags;
623 volatile u16 reserved;
624};
625
626#define HIFN_CRYPT_RES_SRC_NOTZERO 0x0001 /* source expired */
627
628#ifndef HIFN_POLL_FREQUENCY
629#define HIFN_POLL_FREQUENCY 0x1
630#endif
631
632#ifndef HIFN_POLL_SCALAR
633#define HIFN_POLL_SCALAR 0x0
634#endif
635
636#define HIFN_MAX_SEGLEN 0xffff /* maximum dma segment len */
637#define HIFN_MAX_DMALEN 0x3ffff /* maximum dma length */
638
639struct hifn_crypto_alg
640{
641 struct list_head entry;
642 struct crypto_alg alg;
643 struct hifn_device *dev;
644};
645
646#define ASYNC_SCATTERLIST_CACHE 16
647
648#define ASYNC_FLAGS_MISALIGNED (1<<0)
649
650struct ablkcipher_walk
651{
652 struct scatterlist cache[ASYNC_SCATTERLIST_CACHE];
653 u32 flags;
654 int num;
655};
656
657struct hifn_context
658{
659 u8 key[HIFN_MAX_CRYPT_KEY_LENGTH], *iv;
660 struct hifn_device *dev;
661 unsigned int keysize, ivsize;
662 u8 op, type, mode, unused;
663 struct ablkcipher_walk walk;
664 atomic_t sg_num;
665};
666
667#define crypto_alg_to_hifn(a) container_of(a, struct hifn_crypto_alg, alg)
668
669static inline u32 hifn_read_0(struct hifn_device *dev, u32 reg)
670{
671 u32 ret;
672
673 ret = readl((char *)(dev->bar[0]) + reg);
674
675 return ret;
676}
677
678static inline u32 hifn_read_1(struct hifn_device *dev, u32 reg)
679{
680 u32 ret;
681
682 ret = readl((char *)(dev->bar[1]) + reg);
683
684 return ret;
685}
686
687static inline void hifn_write_0(struct hifn_device *dev, u32 reg, u32 val)
688{
689 writel(val, (char *)(dev->bar[0]) + reg);
690}
691
692static inline void hifn_write_1(struct hifn_device *dev, u32 reg, u32 val)
693{
694 writel(val, (char *)(dev->bar[1]) + reg);
695}
696
697static void hifn_wait_puc(struct hifn_device *dev)
698{
699 int i;
700 u32 ret;
701
702 for (i=10000; i > 0; --i) {
703 ret = hifn_read_0(dev, HIFN_0_PUCTRL);
704 if (!(ret & HIFN_PUCTRL_RESET))
705 break;
706
707 udelay(1);
708 }
709
710 if (!i)
711 dprintk("%s: Failed to reset PUC unit.\n", dev->name);
712}
713
714static void hifn_reset_puc(struct hifn_device *dev)
715{
716 hifn_write_0(dev, HIFN_0_PUCTRL, HIFN_PUCTRL_DMAENA);
717 hifn_wait_puc(dev);
718}
719
720static void hifn_stop_device(struct hifn_device *dev)
721{
722 hifn_write_1(dev, HIFN_1_DMA_CSR,
723 HIFN_DMACSR_D_CTRL_DIS | HIFN_DMACSR_R_CTRL_DIS |
724 HIFN_DMACSR_S_CTRL_DIS | HIFN_DMACSR_C_CTRL_DIS);
725 hifn_write_0(dev, HIFN_0_PUIER, 0);
726 hifn_write_1(dev, HIFN_1_DMA_IER, 0);
727}
728
729static void hifn_reset_dma(struct hifn_device *dev, int full)
730{
731 hifn_stop_device(dev);
732
733 /*
734 * Setting poll frequency and others to 0.
735 */
736 hifn_write_1(dev, HIFN_1_DMA_CNFG, HIFN_DMACNFG_MSTRESET |
737 HIFN_DMACNFG_DMARESET | HIFN_DMACNFG_MODE);
738 mdelay(1);
739
740 /*
741 * Reset DMA.
742 */
743 if (full) {
744 hifn_write_1(dev, HIFN_1_DMA_CNFG, HIFN_DMACNFG_MODE);
745 mdelay(1);
746 } else {
747 hifn_write_1(dev, HIFN_1_DMA_CNFG, HIFN_DMACNFG_MODE |
748 HIFN_DMACNFG_MSTRESET);
749 hifn_reset_puc(dev);
750 }
751
752 hifn_write_1(dev, HIFN_1_DMA_CNFG, HIFN_DMACNFG_MSTRESET |
753 HIFN_DMACNFG_DMARESET | HIFN_DMACNFG_MODE);
754
755 hifn_reset_puc(dev);
756}
757
758static u32 hifn_next_signature(u_int32_t a, u_int cnt)
759{
760 int i;
761 u32 v;
762
763 for (i = 0; i < cnt; i++) {
764
765 /* get the parity */
766 v = a & 0x80080125;
767 v ^= v >> 16;
768 v ^= v >> 8;
769 v ^= v >> 4;
770 v ^= v >> 2;
771 v ^= v >> 1;
772
773 a = (v & 1) ^ (a << 1);
774 }
775
776 return a;
777}
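hifn_next_signature() is a parity-feedback shift: each step takes the parity of the bits selected by the 0x80080125 tap mask and shifts it into the low bit, and hifn_enable_crypto() further below advances the value by (card-id byte + 0x101) steps per byte to derive the unlock sequence. A standalone, compilable restatement with made-up inputs:

#include <stdio.h>
#include <stdint.h>
#include <inttypes.h>

static uint32_t next_signature(uint32_t a, unsigned int cnt)
{
	while (cnt--) {
		uint32_t v = a & 0x80080125u;	/* select the tap bits */
		v ^= v >> 16;
		v ^= v >> 8;
		v ^= v >> 4;
		v ^= v >> 2;
		v ^= v >> 1;			/* v & 1 is now the parity */
		a = (v & 1) ^ (a << 1);
	}
	return a;
}

int main(void)
{
	uint32_t sig = 0xdeadbeef;		/* made-up starting value */
	int i;

	for (i = 0; i < 12; i++)
		sig = next_signature(sig, 0x00 + 0x101);	/* card-id byte 0x00 */

	printf("final signature: 0x%08" PRIx32 "\n", sig);
	return 0;
}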
778
779static struct pci2id {
780 u_short pci_vendor;
781 u_short pci_prod;
782 char card_id[13];
783} pci2id[] = {
784 {
785 PCI_VENDOR_ID_HIFN,
786 PCI_DEVICE_ID_HIFN_7955,
787 { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
788 0x00, 0x00, 0x00, 0x00, 0x00 }
789 },
790 {
791 PCI_VENDOR_ID_HIFN,
792 PCI_DEVICE_ID_HIFN_7956,
793 { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
794 0x00, 0x00, 0x00, 0x00, 0x00 }
795 }
796};
797
798#if defined(CONFIG_HW_RANDOM) || defined(CONFIG_HW_RANDOM_MODULE)
799static int hifn_rng_data_present(struct hwrng *rng, int wait)
800{
801 struct hifn_device *dev = (struct hifn_device *)rng->priv;
802 s64 nsec;
803
804 nsec = ktime_to_ns(ktime_sub(ktime_get(), dev->rngtime));
805 nsec -= dev->rng_wait_time;
806 if (nsec <= 0)
807 return 1;
808 if (!wait)
809 return 0;
810 ndelay(nsec);
811 return 1;
812}
813
814static int hifn_rng_data_read(struct hwrng *rng, u32 *data)
815{
816 struct hifn_device *dev = (struct hifn_device *)rng->priv;
817
818 *data = hifn_read_1(dev, HIFN_1_RNG_DATA);
819 dev->rngtime = ktime_get();
820 return 4;
821}
822
823static int hifn_register_rng(struct hifn_device *dev)
824{
825 /*
826 * We must wait at least 256 Pk_clk cycles between two reads of the rng.
827 */
828 dev->rng_wait_time = DIV_ROUND_UP(NSEC_PER_SEC, dev->pk_clk_freq) *
829 256;
830
831 dev->rng.name = dev->name;
 832	dev->rng.data_present = hifn_rng_data_present;
 833	dev->rng.data_read = hifn_rng_data_read;
834 dev->rng.priv = (unsigned long)dev;
835
836 return hwrng_register(&dev->rng);
837}
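Continuing the worked numbers from hifn_init_pll(): with the ~134 MHz Fpk_clk estimate, DIV_ROUND_UP(NSEC_PER_SEC, pk_clk_freq) is 8 ns, so rng_wait_time becomes 8 * 256 = 2048 ns, and hifn_rng_data_present() will delay (or report no data) until at least that long has passed since the previous read.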
838
839static void hifn_unregister_rng(struct hifn_device *dev)
840{
841 hwrng_unregister(&dev->rng);
842}
843#else
844#define hifn_register_rng(dev) 0
845#define hifn_unregister_rng(dev)
846#endif
847
848static int hifn_init_pubrng(struct hifn_device *dev)
849{
850 int i;
851
852 hifn_write_1(dev, HIFN_1_PUB_RESET, hifn_read_1(dev, HIFN_1_PUB_RESET) |
853 HIFN_PUBRST_RESET);
854
855 for (i=100; i > 0; --i) {
856 mdelay(1);
857
858 if ((hifn_read_1(dev, HIFN_1_PUB_RESET) & HIFN_PUBRST_RESET) == 0)
859 break;
860 }
861
862 if (!i)
863 dprintk("Chip %s: Failed to initialise public key engine.\n",
864 dev->name);
865 else {
866 hifn_write_1(dev, HIFN_1_PUB_IEN, HIFN_PUBIEN_DONE);
867 dev->dmareg |= HIFN_DMAIER_PUBDONE;
868 hifn_write_1(dev, HIFN_1_DMA_IER, dev->dmareg);
869
 870		dprintk("Chip %s: Public key engine has been successfully "
871 "initialised.\n", dev->name);
872 }
873
874 /*
875 * Enable RNG engine.
876 */
877
878 hifn_write_1(dev, HIFN_1_RNG_CONFIG,
879 hifn_read_1(dev, HIFN_1_RNG_CONFIG) | HIFN_RNGCFG_ENA);
880 dprintk("Chip %s: RNG engine has been successfully initialised.\n",
881 dev->name);
882
883#if defined(CONFIG_HW_RANDOM) || defined(CONFIG_HW_RANDOM_MODULE)
884 /* First value must be discarded */
885 hifn_read_1(dev, HIFN_1_RNG_DATA);
886 dev->rngtime = ktime_get();
887#endif
888 return 0;
889}
890
891static int hifn_enable_crypto(struct hifn_device *dev)
892{
893 u32 dmacfg, addr;
894 char *offtbl = NULL;
895 int i;
896
897 for (i = 0; i < sizeof(pci2id)/sizeof(pci2id[0]); i++) {
898 if (pci2id[i].pci_vendor == dev->pdev->vendor &&
899 pci2id[i].pci_prod == dev->pdev->device) {
900 offtbl = pci2id[i].card_id;
901 break;
902 }
903 }
904
905 if (offtbl == NULL) {
906 dprintk("Chip %s: Unknown card!\n", dev->name);
907 return -ENODEV;
908 }
909
910 dmacfg = hifn_read_1(dev, HIFN_1_DMA_CNFG);
911
912 hifn_write_1(dev, HIFN_1_DMA_CNFG,
913 HIFN_DMACNFG_UNLOCK | HIFN_DMACNFG_MSTRESET |
914 HIFN_DMACNFG_DMARESET | HIFN_DMACNFG_MODE);
915 mdelay(1);
916 addr = hifn_read_1(dev, HIFN_1_UNLOCK_SECRET1);
917 mdelay(1);
918 hifn_write_1(dev, HIFN_1_UNLOCK_SECRET2, 0);
919 mdelay(1);
920
921 for (i=0; i<12; ++i) {
922 addr = hifn_next_signature(addr, offtbl[i] + 0x101);
923 hifn_write_1(dev, HIFN_1_UNLOCK_SECRET2, addr);
924
925 mdelay(1);
926 }
927 hifn_write_1(dev, HIFN_1_DMA_CNFG, dmacfg);
928
929 dprintk("Chip %s: %s.\n", dev->name, pci_name(dev->pdev));
930
931 return 0;
932}
933
934static void hifn_init_dma(struct hifn_device *dev)
935{
936 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
937 u32 dptr = dev->desc_dma;
938 int i;
939
940 for (i=0; i<HIFN_D_CMD_RSIZE; ++i)
941 dma->cmdr[i].p = __cpu_to_le32(dptr +
942 offsetof(struct hifn_dma, command_bufs[i][0]));
943 for (i=0; i<HIFN_D_RES_RSIZE; ++i)
944 dma->resr[i].p = __cpu_to_le32(dptr +
945 offsetof(struct hifn_dma, result_bufs[i][0]));
946
947 /*
948 * Setup LAST descriptors.
949 */
950 dma->cmdr[HIFN_D_CMD_RSIZE].p = __cpu_to_le32(dptr +
951 offsetof(struct hifn_dma, cmdr[0]));
952 dma->srcr[HIFN_D_SRC_RSIZE].p = __cpu_to_le32(dptr +
953 offsetof(struct hifn_dma, srcr[0]));
954 dma->dstr[HIFN_D_DST_RSIZE].p = __cpu_to_le32(dptr +
955 offsetof(struct hifn_dma, dstr[0]));
956 dma->resr[HIFN_D_RES_RSIZE].p = __cpu_to_le32(dptr +
957 offsetof(struct hifn_dma, resr[0]));
958
959 dma->cmdu = dma->srcu = dma->dstu = dma->resu = 0;
960 dma->cmdi = dma->srci = dma->dsti = dma->resi = 0;
961 dma->cmdk = dma->srck = dma->dstk = dma->resk = 0;
962}
963
964/*
965 * Initialize the PLL. We need to know the frequency of the reference clock
966 * to calculate the optimal multiplier. For PCI we assume 66MHz, since that
967 * allows us to operate without the risk of overclocking the chip. If it
968 * actually uses 33MHz, the chip will operate at half the speed, this can be
969 * overriden by specifying the frequency as module parameter (pci33).
970 *
971 * Unfortunately the PCI clock is not very suitable since the HIFN needs a
972 * stable clock and the PCI clock frequency may vary, so the default is the
 973 * external clock. There is no way to find out its frequency, so we default to
974 * 66MHz since according to Mike Ham of HiFn, almost every board in existence
975 * has an external crystal populated at 66MHz.
976 */
977static void hifn_init_pll(struct hifn_device *dev)
978{
979 unsigned int freq, m;
980 u32 pllcfg;
981
982 pllcfg = HIFN_1_PLL | HIFN_PLL_RESERVED_1;
983
984 if (strncmp(hifn_pll_ref, "ext", 3) == 0)
985 pllcfg |= HIFN_PLL_REF_CLK_PLL;
986 else
987 pllcfg |= HIFN_PLL_REF_CLK_HBI;
988
989 if (hifn_pll_ref[3] != '\0')
990 freq = simple_strtoul(hifn_pll_ref + 3, NULL, 10);
991 else {
992 freq = 66;
993 printk(KERN_INFO "hifn795x: assuming %uMHz clock speed, "
994 "override with hifn_pll_ref=%.3s<frequency>\n",
995 freq, hifn_pll_ref);
996 }
997
998 m = HIFN_PLL_FCK_MAX / freq;
999
1000 pllcfg |= (m / 2 - 1) << HIFN_PLL_ND_SHIFT;
1001 if (m <= 8)
1002 pllcfg |= HIFN_PLL_IS_1_8;
1003 else
1004 pllcfg |= HIFN_PLL_IS_9_12;
1005
1006 /* Select clock source and enable clock bypass */
1007 hifn_write_1(dev, HIFN_1_PLL, pllcfg |
1008 HIFN_PLL_PK_CLK_HBI | HIFN_PLL_PE_CLK_HBI | HIFN_PLL_BP);
1009
1010 /* Let the chip lock to the input clock */
1011 mdelay(10);
1012
1013 /* Disable clock bypass */
1014 hifn_write_1(dev, HIFN_1_PLL, pllcfg |
1015 HIFN_PLL_PK_CLK_HBI | HIFN_PLL_PE_CLK_HBI);
1016
1017 /* Switch the engines to the PLL */
1018 hifn_write_1(dev, HIFN_1_PLL, pllcfg |
1019 HIFN_PLL_PK_CLK_PLL | HIFN_PLL_PE_CLK_PLL);
1020
1021 /*
1022 * The Fpk_clk runs at half the total speed. Its frequency is needed to
1023 * calculate the minimum time between two reads of the rng. Since 33MHz
1024 * is actually 33.333... we overestimate the frequency here, resulting
1025 * in slightly larger intervals.
1026 */
1027 dev->pk_clk_freq = 1000000 * (freq + 1) * m / 2;
1028}
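Worked example for the default "ext" 66 MHz reference: m = 266 / 66 = 4, so the ND field is programmed with m/2 - 1 = 1 (HIFN_PLL_ND_MULT_4), the charge pump stays in the 1-8 range, and pk_clk_freq is estimated as 1000000 * (66 + 1) * 4 / 2 = 134 MHz.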
1029
1030static void hifn_init_registers(struct hifn_device *dev)
1031{
1032 u32 dptr = dev->desc_dma;
1033
1034 /* Initialization magic... */
1035 hifn_write_0(dev, HIFN_0_PUCTRL, HIFN_PUCTRL_DMAENA);
1036 hifn_write_0(dev, HIFN_0_FIFOCNFG, HIFN_FIFOCNFG_THRESHOLD);
1037 hifn_write_0(dev, HIFN_0_PUIER, HIFN_PUIER_DSTOVER);
1038
1039 /* write all 4 ring address registers */
1040 hifn_write_1(dev, HIFN_1_DMA_CRAR, __cpu_to_le32(dptr +
1041 offsetof(struct hifn_dma, cmdr[0])));
1042 hifn_write_1(dev, HIFN_1_DMA_SRAR, __cpu_to_le32(dptr +
1043 offsetof(struct hifn_dma, srcr[0])));
1044 hifn_write_1(dev, HIFN_1_DMA_DRAR, __cpu_to_le32(dptr +
1045 offsetof(struct hifn_dma, dstr[0])));
1046 hifn_write_1(dev, HIFN_1_DMA_RRAR, __cpu_to_le32(dptr +
1047 offsetof(struct hifn_dma, resr[0])));
1048
1049 mdelay(2);
1050#if 0
1051 hifn_write_1(dev, HIFN_1_DMA_CSR,
1052 HIFN_DMACSR_D_CTRL_DIS | HIFN_DMACSR_R_CTRL_DIS |
1053 HIFN_DMACSR_S_CTRL_DIS | HIFN_DMACSR_C_CTRL_DIS |
1054 HIFN_DMACSR_D_ABORT | HIFN_DMACSR_D_DONE | HIFN_DMACSR_D_LAST |
1055 HIFN_DMACSR_D_WAIT | HIFN_DMACSR_D_OVER |
1056 HIFN_DMACSR_R_ABORT | HIFN_DMACSR_R_DONE | HIFN_DMACSR_R_LAST |
1057 HIFN_DMACSR_R_WAIT | HIFN_DMACSR_R_OVER |
1058 HIFN_DMACSR_S_ABORT | HIFN_DMACSR_S_DONE | HIFN_DMACSR_S_LAST |
1059 HIFN_DMACSR_S_WAIT |
1060 HIFN_DMACSR_C_ABORT | HIFN_DMACSR_C_DONE | HIFN_DMACSR_C_LAST |
1061 HIFN_DMACSR_C_WAIT |
1062 HIFN_DMACSR_ENGINE |
1063 HIFN_DMACSR_PUBDONE);
1064#else
1065 hifn_write_1(dev, HIFN_1_DMA_CSR,
1066 HIFN_DMACSR_C_CTRL_ENA | HIFN_DMACSR_S_CTRL_ENA |
1067 HIFN_DMACSR_D_CTRL_ENA | HIFN_DMACSR_R_CTRL_ENA |
1068 HIFN_DMACSR_D_ABORT | HIFN_DMACSR_D_DONE | HIFN_DMACSR_D_LAST |
1069 HIFN_DMACSR_D_WAIT | HIFN_DMACSR_D_OVER |
1070 HIFN_DMACSR_R_ABORT | HIFN_DMACSR_R_DONE | HIFN_DMACSR_R_LAST |
1071 HIFN_DMACSR_R_WAIT | HIFN_DMACSR_R_OVER |
1072 HIFN_DMACSR_S_ABORT | HIFN_DMACSR_S_DONE | HIFN_DMACSR_S_LAST |
1073 HIFN_DMACSR_S_WAIT |
1074 HIFN_DMACSR_C_ABORT | HIFN_DMACSR_C_DONE | HIFN_DMACSR_C_LAST |
1075 HIFN_DMACSR_C_WAIT |
1076 HIFN_DMACSR_ENGINE |
1077 HIFN_DMACSR_PUBDONE);
1078#endif
1079 hifn_read_1(dev, HIFN_1_DMA_CSR);
1080
1081 dev->dmareg |= HIFN_DMAIER_R_DONE | HIFN_DMAIER_C_ABORT |
1082 HIFN_DMAIER_D_OVER | HIFN_DMAIER_R_OVER |
1083 HIFN_DMAIER_S_ABORT | HIFN_DMAIER_D_ABORT | HIFN_DMAIER_R_ABORT |
1084 HIFN_DMAIER_ENGINE;
1085 dev->dmareg &= ~HIFN_DMAIER_C_WAIT;
1086
1087 hifn_write_1(dev, HIFN_1_DMA_IER, dev->dmareg);
1088 hifn_read_1(dev, HIFN_1_DMA_IER);
1089#if 0
1090 hifn_write_0(dev, HIFN_0_PUCNFG, HIFN_PUCNFG_ENCCNFG |
1091 HIFN_PUCNFG_DRFR_128 | HIFN_PUCNFG_TCALLPHASES |
1092 HIFN_PUCNFG_TCDRVTOTEM | HIFN_PUCNFG_BUS32 |
1093 HIFN_PUCNFG_DRAM);
1094#else
1095 hifn_write_0(dev, HIFN_0_PUCNFG, 0x10342);
1096#endif
1097 hifn_init_pll(dev);
1098
1099 hifn_write_0(dev, HIFN_0_PUISR, HIFN_PUISR_DSTOVER);
1100 hifn_write_1(dev, HIFN_1_DMA_CNFG, HIFN_DMACNFG_MSTRESET |
1101 HIFN_DMACNFG_DMARESET | HIFN_DMACNFG_MODE | HIFN_DMACNFG_LAST |
1102 ((HIFN_POLL_FREQUENCY << 16 ) & HIFN_DMACNFG_POLLFREQ) |
1103 ((HIFN_POLL_SCALAR << 8) & HIFN_DMACNFG_POLLINVAL));
1104}
1105
1106static int hifn_setup_base_command(struct hifn_device *dev, u8 *buf,
1107 unsigned dlen, unsigned slen, u16 mask, u8 snum)
1108{
1109 struct hifn_base_command *base_cmd;
1110 u8 *buf_pos = buf;
1111
1112 base_cmd = (struct hifn_base_command *)buf_pos;
1113 base_cmd->masks = __cpu_to_le16(mask);
1114 base_cmd->total_source_count =
1115 __cpu_to_le16(slen & HIFN_BASE_CMD_LENMASK_LO);
1116 base_cmd->total_dest_count =
1117 __cpu_to_le16(dlen & HIFN_BASE_CMD_LENMASK_LO);
1118
1119 dlen >>= 16;
1120 slen >>= 16;
1121 base_cmd->session_num = __cpu_to_le16(snum |
1122 ((slen << HIFN_BASE_CMD_SRCLEN_S) & HIFN_BASE_CMD_SRCLEN_M) |
1123 ((dlen << HIFN_BASE_CMD_DSTLEN_S) & HIFN_BASE_CMD_DSTLEN_M));
1124
1125 return sizeof(struct hifn_base_command);
1126}
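For example, with slen = dlen = 0x12345, the low halves (0x2345) land in total_source_count and total_dest_count, while the remaining bit 16 of each length is folded into session_num as 0x4000 (SRCLEN field) and 0x1000 (DSTLEN field); this is how lengths up to the 18-bit HIFN_MAX_DMALEN fit a command layout built from 16-bit words.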
1127
1128static int hifn_setup_crypto_command(struct hifn_device *dev,
1129 u8 *buf, unsigned dlen, unsigned slen,
1130 u8 *key, int keylen, u8 *iv, int ivsize, u16 mode)
1131{
1132 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
1133 struct hifn_crypt_command *cry_cmd;
1134 u8 *buf_pos = buf;
1135 u16 cmd_len;
1136
1137 cry_cmd = (struct hifn_crypt_command *)buf_pos;
1138
1139 cry_cmd->source_count = __cpu_to_le16(dlen & 0xffff);
1140 dlen >>= 16;
1141 cry_cmd->masks = __cpu_to_le16(mode |
1142 ((dlen << HIFN_CRYPT_CMD_SRCLEN_S) &
1143 HIFN_CRYPT_CMD_SRCLEN_M));
1144 cry_cmd->header_skip = 0;
1145 cry_cmd->reserved = 0;
1146
1147 buf_pos += sizeof(struct hifn_crypt_command);
1148
1149 dma->cmdu++;
1150 if (dma->cmdu > 1) {
1151 dev->dmareg |= HIFN_DMAIER_C_WAIT;
1152 hifn_write_1(dev, HIFN_1_DMA_IER, dev->dmareg);
1153 }
1154
1155 if (keylen) {
1156 memcpy(buf_pos, key, keylen);
1157 buf_pos += keylen;
1158 }
1159 if (ivsize) {
1160 memcpy(buf_pos, iv, ivsize);
1161 buf_pos += ivsize;
1162 }
1163
1164 cmd_len = buf_pos - buf;
1165
1166 return cmd_len;
1167}
1168
1169static int hifn_setup_src_desc(struct hifn_device *dev, struct page *page,
1170 unsigned int offset, unsigned int size)
1171{
1172 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
1173 int idx;
1174 dma_addr_t addr;
1175
1176 addr = pci_map_page(dev->pdev, page, offset, size, PCI_DMA_TODEVICE);
1177
1178 idx = dma->srci;
1179
1180 dma->srcr[idx].p = __cpu_to_le32(addr);
1181 dma->srcr[idx].l = __cpu_to_le32(size) | HIFN_D_VALID |
1182 HIFN_D_MASKDONEIRQ | HIFN_D_NOINVALID | HIFN_D_LAST;
1183
1184 if (++idx == HIFN_D_SRC_RSIZE) {
1185 dma->srcr[idx].l = __cpu_to_le32(HIFN_D_VALID |
1186 HIFN_D_JUMP |
1187 HIFN_D_MASKDONEIRQ | HIFN_D_LAST);
1188 idx = 0;
1189 }
1190
1191 dma->srci = idx;
1192 dma->srcu++;
1193
1194 if (!(dev->flags & HIFN_FLAG_SRC_BUSY)) {
1195 hifn_write_1(dev, HIFN_1_DMA_CSR, HIFN_DMACSR_S_CTRL_ENA);
1196 dev->flags |= HIFN_FLAG_SRC_BUSY;
1197 }
1198
1199 return size;
1200}
1201
1202static void hifn_setup_res_desc(struct hifn_device *dev)
1203{
1204 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
1205
1206 dma->resr[dma->resi].l = __cpu_to_le32(HIFN_USED_RESULT |
1207 HIFN_D_VALID | HIFN_D_LAST);
1208 /*
1209 * dma->resr[dma->resi].l = __cpu_to_le32(HIFN_MAX_RESULT | HIFN_D_VALID |
1210 * HIFN_D_LAST | HIFN_D_NOINVALID);
1211 */
1212
1213 if (++dma->resi == HIFN_D_RES_RSIZE) {
1214 dma->resr[HIFN_D_RES_RSIZE].l = __cpu_to_le32(HIFN_D_VALID |
1215 HIFN_D_JUMP | HIFN_D_MASKDONEIRQ | HIFN_D_LAST);
1216 dma->resi = 0;
1217 }
1218
1219 dma->resu++;
1220
1221 if (!(dev->flags & HIFN_FLAG_RES_BUSY)) {
1222 hifn_write_1(dev, HIFN_1_DMA_CSR, HIFN_DMACSR_R_CTRL_ENA);
1223 dev->flags |= HIFN_FLAG_RES_BUSY;
1224 }
1225}
1226
1227static void hifn_setup_dst_desc(struct hifn_device *dev, struct page *page,
1228 unsigned offset, unsigned size)
1229{
1230 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
1231 int idx;
1232 dma_addr_t addr;
1233
1234 addr = pci_map_page(dev->pdev, page, offset, size, PCI_DMA_FROMDEVICE);
1235
1236 idx = dma->dsti;
1237 dma->dstr[idx].p = __cpu_to_le32(addr);
1238 dma->dstr[idx].l = __cpu_to_le32(size | HIFN_D_VALID |
1239 HIFN_D_MASKDONEIRQ | HIFN_D_NOINVALID | HIFN_D_LAST);
1240
1241 if (++idx == HIFN_D_DST_RSIZE) {
1242 dma->dstr[idx].l = __cpu_to_le32(HIFN_D_VALID |
1243 HIFN_D_JUMP | HIFN_D_MASKDONEIRQ |
1244 HIFN_D_LAST | HIFN_D_NOINVALID);
1245 idx = 0;
1246 }
1247 dma->dsti = idx;
1248 dma->dstu++;
1249
1250 if (!(dev->flags & HIFN_FLAG_DST_BUSY)) {
1251 hifn_write_1(dev, HIFN_1_DMA_CSR, HIFN_DMACSR_D_CTRL_ENA);
1252 dev->flags |= HIFN_FLAG_DST_BUSY;
1253 }
1254}
1255
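/*
 * Illustration only, not part of the driver: the command, source, destination
 * and result descriptor setters above all follow the same ring discipline -
 * fill the current slot, mark it VALID, turn the extra final slot into a JUMP
 * descriptor back to index 0 when the ring wraps, and enable the matching DMA
 * engine if it is not already running. A generic sketch of that pattern, with
 * a hypothetical helper name:
 */
static void example_ring_advance(struct hifn_desc *ring, int rsize,
				 int *index, unsigned int *used,
				 u32 addr, u32 ctrl)
{
	int idx = *index;

	ring[idx].p = __cpu_to_le32(addr);
	ring[idx].l = __cpu_to_le32(ctrl | HIFN_D_VALID | HIFN_D_LAST);

	if (++idx == rsize) {
		/* The last slot points the engine back to the ring start. */
		ring[idx].l = __cpu_to_le32(HIFN_D_VALID | HIFN_D_JUMP |
					    HIFN_D_MASKDONEIRQ | HIFN_D_LAST);
		idx = 0;
	}

	*index = idx;
	(*used)++;
}
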
1256static int hifn_setup_dma(struct hifn_device *dev, struct page *spage, unsigned int soff,
1257 struct page *dpage, unsigned int doff, unsigned int nbytes, void *priv,
1258 struct hifn_context *ctx)
1259{
1260 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
1261 int cmd_len, sa_idx;
1262 u8 *buf, *buf_pos;
1263 u16 mask;
1264
1265 dprintk("%s: spage: %p, soffset: %u, dpage: %p, doffset: %u, nbytes: %u, priv: %p, ctx: %p.\n",
1266 dev->name, spage, soff, dpage, doff, nbytes, priv, ctx);
1267
1268 sa_idx = dma->resi;
1269
1270 hifn_setup_src_desc(dev, spage, soff, nbytes);
1271
1272 buf_pos = buf = dma->command_bufs[dma->cmdi];
1273
1274 mask = 0;
1275 switch (ctx->op) {
1276 case ACRYPTO_OP_DECRYPT:
1277 mask = HIFN_BASE_CMD_CRYPT | HIFN_BASE_CMD_DECODE;
1278 break;
1279 case ACRYPTO_OP_ENCRYPT:
1280 mask = HIFN_BASE_CMD_CRYPT;
1281 break;
1282 case ACRYPTO_OP_HMAC:
1283 mask = HIFN_BASE_CMD_MAC;
1284 break;
1285 default:
1286 goto err_out;
1287 }
1288
1289 buf_pos += hifn_setup_base_command(dev, buf_pos, nbytes,
1290 nbytes, mask, dev->snum);
1291
1292 if (ctx->op == ACRYPTO_OP_ENCRYPT || ctx->op == ACRYPTO_OP_DECRYPT) {
1293 u16 md = 0;
1294
1295 if (ctx->keysize)
1296 md |= HIFN_CRYPT_CMD_NEW_KEY;
1297 if (ctx->iv && ctx->mode != ACRYPTO_MODE_ECB)
1298 md |= HIFN_CRYPT_CMD_NEW_IV;
1299
1300 switch (ctx->mode) {
1301 case ACRYPTO_MODE_ECB:
1302 md |= HIFN_CRYPT_CMD_MODE_ECB;
1303 break;
1304 case ACRYPTO_MODE_CBC:
1305 md |= HIFN_CRYPT_CMD_MODE_CBC;
1306 break;
1307 case ACRYPTO_MODE_CFB:
1308 md |= HIFN_CRYPT_CMD_MODE_CFB;
1309 break;
1310 case ACRYPTO_MODE_OFB:
1311 md |= HIFN_CRYPT_CMD_MODE_OFB;
1312 break;
1313 default:
1314 goto err_out;
1315 }
1316
1317 switch (ctx->type) {
1318 case ACRYPTO_TYPE_AES_128:
1319 if (ctx->keysize != 16)
1320 goto err_out;
1321 md |= HIFN_CRYPT_CMD_KSZ_128 |
1322 HIFN_CRYPT_CMD_ALG_AES;
1323 break;
1324 case ACRYPTO_TYPE_AES_192:
1325 if (ctx->keysize != 24)
1326 goto err_out;
1327 md |= HIFN_CRYPT_CMD_KSZ_192 |
1328 HIFN_CRYPT_CMD_ALG_AES;
1329 break;
1330 case ACRYPTO_TYPE_AES_256:
1331 if (ctx->keysize != 32)
1332 goto err_out;
1333 md |= HIFN_CRYPT_CMD_KSZ_256 |
1334 HIFN_CRYPT_CMD_ALG_AES;
1335 break;
1336 case ACRYPTO_TYPE_3DES:
1337 if (ctx->keysize != 24)
1338 goto err_out;
1339 md |= HIFN_CRYPT_CMD_ALG_3DES;
1340 break;
1341 case ACRYPTO_TYPE_DES:
1342 if (ctx->keysize != 8)
1343 goto err_out;
1344 md |= HIFN_CRYPT_CMD_ALG_DES;
1345 break;
1346 default:
1347 goto err_out;
1348 }
1349
1350 buf_pos += hifn_setup_crypto_command(dev, buf_pos,
1351 nbytes, nbytes, ctx->key, ctx->keysize,
1352 ctx->iv, ctx->ivsize, md);
1353 }
1354
1355 dev->sa[sa_idx] = priv;
1356
1357 cmd_len = buf_pos - buf;
1358 dma->cmdr[dma->cmdi].l = __cpu_to_le32(cmd_len | HIFN_D_VALID |
1359 HIFN_D_LAST | HIFN_D_MASKDONEIRQ);
1360
1361 if (++dma->cmdi == HIFN_D_CMD_RSIZE) {
1362 dma->cmdr[dma->cmdi].l = __cpu_to_le32(HIFN_MAX_COMMAND |
1363 HIFN_D_VALID | HIFN_D_LAST |
1364 HIFN_D_MASKDONEIRQ | HIFN_D_JUMP);
1365 dma->cmdi = 0;
1366 } else
1367 dma->cmdr[dma->cmdi-1].l |= __cpu_to_le32(HIFN_D_VALID);
1368
1369 if (!(dev->flags & HIFN_FLAG_CMD_BUSY)) {
1370 hifn_write_1(dev, HIFN_1_DMA_CSR, HIFN_DMACSR_C_CTRL_ENA);
1371 dev->flags |= HIFN_FLAG_CMD_BUSY;
1372 }
1373
1374 hifn_setup_dst_desc(dev, dpage, doff, nbytes);
1375 hifn_setup_res_desc(dev);
1376
1377 return 0;
1378
1379err_out:
1380 return -EINVAL;
1381}
1382
1383static int ablkcipher_walk_init(struct ablkcipher_walk *w,
1384 int num, gfp_t gfp_flags)
1385{
1386 int i;
1387
1388 num = min(ASYNC_SCATTERLIST_CACHE, num);
1389 sg_init_table(w->cache, num);
1390
1391 w->num = 0;
1392 for (i=0; i<num; ++i) {
1393 struct page *page = alloc_page(gfp_flags);
1394 struct scatterlist *s;
1395
1396 if (!page)
1397 break;
1398
1399 s = &w->cache[i];
1400
1401 sg_set_page(s, page, PAGE_SIZE, 0);
1402 w->num++;
1403 }
1404
1405 return i;
1406}
1407
1408static void ablkcipher_walk_exit(struct ablkcipher_walk *w)
1409{
1410 int i;
1411
1412 for (i=0; i<w->num; ++i) {
1413 struct scatterlist *s = &w->cache[i];
1414
1415 __free_page(sg_page(s));
1416
1417 s->length = 0;
1418 }
1419
1420 w->num = 0;
1421}
1422
1423static int ablkcipher_add(void *daddr, unsigned int *drestp, struct scatterlist *src,
1424 unsigned int size, unsigned int *nbytesp)
1425{
1426 unsigned int copy, drest = *drestp, nbytes = *nbytesp;
1427 int idx = 0;
1428 void *saddr;
1429
1430 if (drest < size || size > nbytes)
1431 return -EINVAL;
1432
1433 while (size) {
1434 copy = min(drest, src->length);
1435
1436 saddr = kmap_atomic(sg_page(src), KM_SOFTIRQ1);
1437 memcpy(daddr, saddr + src->offset, copy);
1438 kunmap_atomic(saddr, KM_SOFTIRQ1);
1439
1440 size -= copy;
1441 drest -= copy;
1442 nbytes -= copy;
1443 daddr += copy;
1444
1445 dprintk("%s: copy: %u, size: %u, drest: %u, nbytes: %u.\n",
1446 __func__, copy, size, drest, nbytes);
1447
1448 src++;
1449 idx++;
1450 }
1451
1452 *nbytesp = nbytes;
1453 *drestp = drest;
1454
1455 return idx;
1456}
1457
1458static int ablkcipher_walk(struct ablkcipher_request *req,
1459 struct ablkcipher_walk *w)
1460{
1461 unsigned blocksize =
1462 crypto_ablkcipher_blocksize(crypto_ablkcipher_reqtfm(req));
1463 unsigned alignmask =
1464 crypto_ablkcipher_alignmask(crypto_ablkcipher_reqtfm(req));
1465 struct scatterlist *src, *dst, *t;
1466 void *daddr;
1467 unsigned int nbytes = req->nbytes, offset, copy, diff;
1468 int idx, tidx, err;
1469
1470 tidx = idx = 0;
1471 offset = 0;
1472 while (nbytes) {
1473 if (idx >= w->num && (w->flags & ASYNC_FLAGS_MISALIGNED))
1474 return -EINVAL;
1475
1476 src = &req->src[idx];
1477 dst = &req->dst[idx];
1478
1479 dprintk("\n%s: slen: %u, dlen: %u, soff: %u, doff: %u, offset: %u, "
1480 "blocksize: %u, nbytes: %u.\n",
1481 __func__, src->length, dst->length, src->offset,
1482 dst->offset, offset, blocksize, nbytes);
1483
1484 if (src->length & (blocksize - 1) ||
1485 src->offset & (alignmask - 1) ||
1486 dst->length & (blocksize - 1) ||
1487 dst->offset & (alignmask - 1) ||
1488 offset) {
1489 unsigned slen = src->length - offset;
1490 unsigned dlen = PAGE_SIZE;
1491
1492 t = &w->cache[idx];
1493
1494 daddr = kmap_atomic(sg_page(t), KM_SOFTIRQ0);
1495 err = ablkcipher_add(daddr, &dlen, src, slen, &nbytes);
1496 if (err < 0)
1497 goto err_out_unmap;
1498
1499 idx += err;
1500
1501 copy = slen & ~(blocksize - 1);
1502 diff = slen & (blocksize - 1);
1503
1504 if (dlen < nbytes) {
1505 /*
1506				 * The destination page does not have enough space
1507				 * to hold an additional blocksize-sized chunk,
1508				 * so we mark that page as containing only
1509				 * blocksize-aligned chunks:
1510				 * 	t->length = (slen & ~(blocksize - 1));
1511				 * and increase the number of bytes to be processed
1512				 * in the next chunk:
1513 * nbytes += diff;
1514 */
1515 nbytes += diff;
1516
1517 /*
1518				 * Temporary, of course...
1519				 * Please notify the author if you ever hit this path.
1520 */
1521				printk(KERN_ERR "%s: dlen: %u, nbytes: %u, "
1522 "slen: %u, offset: %u.\n",
1523 __func__, dlen, nbytes, slen, offset);
1524				printk(KERN_ERR "%s: please contact the author to fix this "
1525						"issue; this path should never be hit "
1526						"under normal conditions, but it depends "
1527						"on how the crypto code was used.\n"
1528 "Thank you.\n", __func__);
1529 BUG();
1530 } else {
1531 copy += diff + nbytes;
1532
1533 src = &req->src[idx];
1534
1535 err = ablkcipher_add(daddr + slen, &dlen, src, nbytes, &nbytes);
1536 if (err < 0)
1537 goto err_out_unmap;
1538
1539 idx += err;
1540 }
1541
1542 t->length = copy;
1543 t->offset = offset;
1544
1545 kunmap_atomic(daddr, KM_SOFTIRQ0);
1546 } else {
1547 nbytes -= src->length;
1548 idx++;
1549 }
1550
1551 tidx++;
1552 }
1553
1554 return tidx;
1555
1556err_out_unmap:
1557 kunmap_atomic(daddr, KM_SOFTIRQ0);
1558 return err;
1559}
1560
1561static int hifn_setup_session(struct ablkcipher_request *req)
1562{
1563 struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm);
1564 struct hifn_device *dev = ctx->dev;
1565 struct page *spage, *dpage;
1566 unsigned long soff, doff, flags;
1567 unsigned int nbytes = req->nbytes, idx = 0, len;
1568 int err = -EINVAL, sg_num;
1569 struct scatterlist *src, *dst, *t;
1570 unsigned blocksize =
1571 crypto_ablkcipher_blocksize(crypto_ablkcipher_reqtfm(req));
1572 unsigned alignmask =
1573 crypto_ablkcipher_alignmask(crypto_ablkcipher_reqtfm(req));
1574
1575 if (ctx->iv && !ctx->ivsize && ctx->mode != ACRYPTO_MODE_ECB)
1576 goto err_out_exit;
1577
1578 ctx->walk.flags = 0;
1579
1580 while (nbytes) {
1581 src = &req->src[idx];
1582 dst = &req->dst[idx];
1583
1584 if (src->length & (blocksize - 1) ||
1585 src->offset & (alignmask - 1) ||
1586 dst->length & (blocksize - 1) ||
1587 dst->offset & (alignmask - 1)) {
1588 ctx->walk.flags |= ASYNC_FLAGS_MISALIGNED;
1589 }
1590
1591 nbytes -= src->length;
1592 idx++;
1593 }
1594
1595 if (ctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {
1596 err = ablkcipher_walk_init(&ctx->walk, idx, GFP_ATOMIC);
1597 if (err < 0)
1598 return err;
1599 }
1600
1601 nbytes = req->nbytes;
1602 idx = 0;
1603
1604 sg_num = ablkcipher_walk(req, &ctx->walk);
1605
1606 atomic_set(&ctx->sg_num, sg_num);
1607
1608 spin_lock_irqsave(&dev->lock, flags);
1609 if (dev->started + sg_num > HIFN_QUEUE_LENGTH) {
1610 err = -EAGAIN;
1611 goto err_out;
1612 }
1613
1614 dev->snum++;
1615 dev->started += sg_num;
1616
1617 while (nbytes) {
1618 src = &req->src[idx];
1619 dst = &req->dst[idx];
1620 t = &ctx->walk.cache[idx];
1621
1622 if (t->length) {
1623 spage = dpage = sg_page(t);
1624 soff = doff = 0;
1625 len = t->length;
1626 } else {
1627 spage = sg_page(src);
1628 soff = src->offset;
1629
1630 dpage = sg_page(dst);
1631 doff = dst->offset;
1632
1633 len = dst->length;
1634 }
1635
1636 idx++;
1637
1638 err = hifn_setup_dma(dev, spage, soff, dpage, doff, nbytes,
1639 req, ctx);
1640 if (err)
1641 goto err_out;
1642
1643 nbytes -= len;
1644 }
1645
1646 dev->active = HIFN_DEFAULT_ACTIVE_NUM;
1647 spin_unlock_irqrestore(&dev->lock, flags);
1648
1649 return 0;
1650
1651err_out:
1652 spin_unlock_irqrestore(&dev->lock, flags);
1653err_out_exit:
1654 if (err && printk_ratelimit())
1655 dprintk("%s: iv: %p [%d], key: %p [%d], mode: %u, op: %u, "
1656 "type: %u, err: %d.\n",
1657 dev->name, ctx->iv, ctx->ivsize,
1658 ctx->key, ctx->keysize,
1659 ctx->mode, ctx->op, ctx->type, err);
1660
1661 return err;
1662}
1663
1664static int hifn_test(struct hifn_device *dev, int encdec, u8 snum)
1665{
1666 int n, err;
1667 u8 src[16];
1668 struct hifn_context ctx;
1669 u8 fips_aes_ecb_from_zero[16] = {
1670 0x66, 0xE9, 0x4B, 0xD4,
1671 0xEF, 0x8A, 0x2C, 0x3B,
1672 0x88, 0x4C, 0xFA, 0x59,
1673 0xCA, 0x34, 0x2B, 0x2E};
1674
1675 memset(src, 0, sizeof(src));
1676 memset(ctx.key, 0, sizeof(ctx.key));
1677
1678 ctx.dev = dev;
1679 ctx.keysize = 16;
1680 ctx.ivsize = 0;
1681 ctx.iv = NULL;
1682 ctx.op = (encdec)?ACRYPTO_OP_ENCRYPT:ACRYPTO_OP_DECRYPT;
1683 ctx.mode = ACRYPTO_MODE_ECB;
1684 ctx.type = ACRYPTO_TYPE_AES_128;
1685 atomic_set(&ctx.sg_num, 1);
1686
1687 err = hifn_setup_dma(dev,
1688 virt_to_page(src), offset_in_page(src),
1689 virt_to_page(src), offset_in_page(src),
1690 sizeof(src), NULL, &ctx);
1691 if (err)
1692 goto err_out;
1693
1694 msleep(200);
1695
1696 dprintk("%s: decoded: ", dev->name);
1697 for (n=0; n<sizeof(src); ++n)
1698 dprintk("%02x ", src[n]);
1699 dprintk("\n");
1700 dprintk("%s: FIPS : ", dev->name);
1701 for (n=0; n<sizeof(fips_aes_ecb_from_zero); ++n)
1702 dprintk("%02x ", fips_aes_ecb_from_zero[n]);
1703 dprintk("\n");
1704
1705 if (!memcmp(src, fips_aes_ecb_from_zero, sizeof(fips_aes_ecb_from_zero))) {
1706		printk(KERN_INFO "%s: AES 128 ECB test passed "
1707				"successfully.\n", dev->name);
1708 return 0;
1709 }
1710
1711err_out:
1712	printk(KERN_INFO "%s: AES 128 ECB test failed.\n", dev->name);
1713 return -1;
1714}
1715
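/*
 * Editorial illustration, not part of the driver: the expected bytes above are
 * the standard AES-128 known answer for an all-zero key and an all-zero block.
 * A software cross-check against the generic "aes" cipher could look like the
 * hypothetical helper below.
 */
static int example_sw_known_answer(void)
{
	static const u8 expected[16] = {
		0x66, 0xE9, 0x4B, 0xD4, 0xEF, 0x8A, 0x2C, 0x3B,
		0x88, 0x4C, 0xFA, 0x59, 0xCA, 0x34, 0x2B, 0x2E,
	};
	u8 key[16] = { 0 }, blk[16] = { 0 };
	struct crypto_cipher *tfm;
	int err;

	tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_cipher_setkey(tfm, key, sizeof(key));
	if (!err) {
		crypto_cipher_encrypt_one(tfm, blk, blk);
		err = memcmp(blk, expected, sizeof(expected)) ? -EINVAL : 0;
	}

	crypto_free_cipher(tfm);
	return err;
}
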
1716static int hifn_start_device(struct hifn_device *dev)
1717{
1718 int err;
1719
1720 hifn_reset_dma(dev, 1);
1721
1722 err = hifn_enable_crypto(dev);
1723 if (err)
1724 return err;
1725
1726 hifn_reset_puc(dev);
1727
1728 hifn_init_dma(dev);
1729
1730 hifn_init_registers(dev);
1731
1732 hifn_init_pubrng(dev);
1733
1734 return 0;
1735}
1736
1737static int ablkcipher_get(void *saddr, unsigned int *srestp, unsigned int offset,
1738 struct scatterlist *dst, unsigned int size, unsigned int *nbytesp)
1739{
1740 unsigned int srest = *srestp, nbytes = *nbytesp, copy;
1741 void *daddr;
1742 int idx = 0;
1743
1744 if (srest < size || size > nbytes)
1745 return -EINVAL;
1746
1747 while (size) {
1748
1749 copy = min(dst->length, srest);
1750
1751 daddr = kmap_atomic(sg_page(dst), KM_IRQ0);
1752 memcpy(daddr + dst->offset + offset, saddr, copy);
1753 kunmap_atomic(daddr, KM_IRQ0);
1754
1755 nbytes -= copy;
1756 size -= copy;
1757 srest -= copy;
1758 saddr += copy;
1759 offset = 0;
1760
1761 dprintk("%s: copy: %u, size: %u, srest: %u, nbytes: %u.\n",
1762 __func__, copy, size, srest, nbytes);
1763
1764 dst++;
1765 idx++;
1766 }
1767
1768 *nbytesp = nbytes;
1769 *srestp = srest;
1770
1771 return idx;
1772}
1773
1774static void hifn_process_ready(struct ablkcipher_request *req, int error)
1775{
1776 struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm);
1777 struct hifn_device *dev;
1778
1779 dprintk("%s: req: %p, ctx: %p.\n", __func__, req, ctx);
1780
1781 dev = ctx->dev;
1782 dprintk("%s: req: %p, started: %d, sg_num: %d.\n",
1783 __func__, req, dev->started, atomic_read(&ctx->sg_num));
1784
1785 if (--dev->started < 0)
1786 BUG();
1787
1788 if (atomic_dec_and_test(&ctx->sg_num)) {
1789 unsigned int nbytes = req->nbytes;
1790 int idx = 0, err;
1791 struct scatterlist *dst, *t;
1792 void *saddr;
1793
1794 if (ctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {
1795 while (nbytes) {
1796 t = &ctx->walk.cache[idx];
1797 dst = &req->dst[idx];
1798
1799 dprintk("\n%s: sg_page(t): %p, t->length: %u, "
1800 "sg_page(dst): %p, dst->length: %u, "
1801 "nbytes: %u.\n",
1802 __func__, sg_page(t), t->length,
1803 sg_page(dst), dst->length, nbytes);
1804
1805 if (!t->length) {
1806 nbytes -= dst->length;
1807 idx++;
1808 continue;
1809 }
1810
1811 saddr = kmap_atomic(sg_page(t), KM_IRQ1);
1812
1813 err = ablkcipher_get(saddr, &t->length, t->offset,
1814 dst, nbytes, &nbytes);
1815 if (err < 0) {
1816 kunmap_atomic(saddr, KM_IRQ1);
1817 break;
1818 }
1819
1820 idx += err;
1821 kunmap_atomic(saddr, KM_IRQ1);
1822 }
1823
1824 ablkcipher_walk_exit(&ctx->walk);
1825 }
1826
1827 req->base.complete(&req->base, error);
1828 }
1829}
1830
1831static void hifn_check_for_completion(struct hifn_device *dev, int error)
1832{
1833 int i;
1834 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
1835
1836 for (i=0; i<HIFN_D_RES_RSIZE; ++i) {
1837 struct hifn_desc *d = &dma->resr[i];
1838
1839 if (!(d->l & __cpu_to_le32(HIFN_D_VALID)) && dev->sa[i]) {
1840 dev->success++;
1841 dev->reset = 0;
1842 hifn_process_ready(dev->sa[i], error);
1843 dev->sa[i] = NULL;
1844 }
1845
1846 if (d->l & __cpu_to_le32(HIFN_D_DESTOVER | HIFN_D_OVER))
1847 if (printk_ratelimit())
1848 printk("%s: overflow detected [d: %u, o: %u] "
1849 "at %d resr: l: %08x, p: %08x.\n",
1850 dev->name,
1851 !!(d->l & __cpu_to_le32(HIFN_D_DESTOVER)),
1852 !!(d->l & __cpu_to_le32(HIFN_D_OVER)),
1853 i, d->l, d->p);
1854 }
1855}
1856
1857static void hifn_clear_rings(struct hifn_device *dev)
1858{
1859 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
1860 int i, u;
1861
1862 dprintk("%s: ring cleanup 1: i: %d.%d.%d.%d, u: %d.%d.%d.%d, "
1863 "k: %d.%d.%d.%d.\n",
1864 dev->name,
1865 dma->cmdi, dma->srci, dma->dsti, dma->resi,
1866 dma->cmdu, dma->srcu, dma->dstu, dma->resu,
1867 dma->cmdk, dma->srck, dma->dstk, dma->resk);
1868
1869 i = dma->resk; u = dma->resu;
1870 while (u != 0) {
1871 if (dma->resr[i].l & __cpu_to_le32(HIFN_D_VALID))
1872 break;
1873
1874 if (i != HIFN_D_RES_RSIZE)
1875 u--;
1876
1877 if (++i == (HIFN_D_RES_RSIZE + 1))
1878 i = 0;
1879 }
1880 dma->resk = i; dma->resu = u;
1881
1882 i = dma->srck; u = dma->srcu;
1883 while (u != 0) {
1884 if (i == HIFN_D_SRC_RSIZE)
1885 i = 0;
1886 if (dma->srcr[i].l & __cpu_to_le32(HIFN_D_VALID))
1887 break;
1888 i++, u--;
1889 }
1890 dma->srck = i; dma->srcu = u;
1891
1892 i = dma->cmdk; u = dma->cmdu;
1893 while (u != 0) {
1894 if (dma->cmdr[i].l & __cpu_to_le32(HIFN_D_VALID))
1895 break;
1896 if (i != HIFN_D_CMD_RSIZE)
1897 u--;
1898 if (++i == (HIFN_D_CMD_RSIZE + 1))
1899 i = 0;
1900 }
1901 dma->cmdk = i; dma->cmdu = u;
1902
1903 i = dma->dstk; u = dma->dstu;
1904 while (u != 0) {
1905 if (i == HIFN_D_DST_RSIZE)
1906 i = 0;
1907 if (dma->dstr[i].l & __cpu_to_le32(HIFN_D_VALID))
1908 break;
1909 i++, u--;
1910 }
1911 dma->dstk = i; dma->dstu = u;
1912
1913 dprintk("%s: ring cleanup 2: i: %d.%d.%d.%d, u: %d.%d.%d.%d, "
1914 "k: %d.%d.%d.%d.\n",
1915 dev->name,
1916 dma->cmdi, dma->srci, dma->dsti, dma->resi,
1917 dma->cmdu, dma->srcu, dma->dstu, dma->resu,
1918 dma->cmdk, dma->srck, dma->dstk, dma->resk);
1919}
1920
1921static void hifn_work(struct work_struct *work)
1922{
1923 struct delayed_work *dw = container_of(work, struct delayed_work, work);
1924 struct hifn_device *dev = container_of(dw, struct hifn_device, work);
1925 unsigned long flags;
1926 int reset = 0;
1927 u32 r = 0;
1928
1929 spin_lock_irqsave(&dev->lock, flags);
1930 if (dev->active == 0) {
1931 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
1932
1933 if (dma->cmdu == 0 && (dev->flags & HIFN_FLAG_CMD_BUSY)) {
1934 dev->flags &= ~HIFN_FLAG_CMD_BUSY;
1935 r |= HIFN_DMACSR_C_CTRL_DIS;
1936 }
1937 if (dma->srcu == 0 && (dev->flags & HIFN_FLAG_SRC_BUSY)) {
1938 dev->flags &= ~HIFN_FLAG_SRC_BUSY;
1939 r |= HIFN_DMACSR_S_CTRL_DIS;
1940 }
1941 if (dma->dstu == 0 && (dev->flags & HIFN_FLAG_DST_BUSY)) {
1942 dev->flags &= ~HIFN_FLAG_DST_BUSY;
1943 r |= HIFN_DMACSR_D_CTRL_DIS;
1944 }
1945 if (dma->resu == 0 && (dev->flags & HIFN_FLAG_RES_BUSY)) {
1946 dev->flags &= ~HIFN_FLAG_RES_BUSY;
1947 r |= HIFN_DMACSR_R_CTRL_DIS;
1948 }
1949 if (r)
1950 hifn_write_1(dev, HIFN_1_DMA_CSR, r);
1951 } else
1952 dev->active--;
1953
1954 if (dev->prev_success == dev->success && dev->started)
1955 reset = 1;
1956 dev->prev_success = dev->success;
1957 spin_unlock_irqrestore(&dev->lock, flags);
1958
1959 if (reset) {
1960 dprintk("%s: r: %08x, active: %d, started: %d, "
1961 "success: %lu: reset: %d.\n",
1962 dev->name, r, dev->active, dev->started,
1963 dev->success, reset);
1964
1965 if (++dev->reset >= 5) {
1966 dprintk("%s: really hard reset.\n", dev->name);
1967 hifn_reset_dma(dev, 1);
1968 hifn_stop_device(dev);
1969 hifn_start_device(dev);
1970 dev->reset = 0;
1971 }
1972
1973 spin_lock_irqsave(&dev->lock, flags);
1974 hifn_check_for_completion(dev, -EBUSY);
1975 hifn_clear_rings(dev);
1976 dev->started = 0;
1977 spin_unlock_irqrestore(&dev->lock, flags);
1978 }
1979
1980 schedule_delayed_work(&dev->work, HZ);
1981}
1982
1983static irqreturn_t hifn_interrupt(int irq, void *data)
1984{
1985 struct hifn_device *dev = (struct hifn_device *)data;
1986 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
1987 u32 dmacsr, restart;
1988
1989 dmacsr = hifn_read_1(dev, HIFN_1_DMA_CSR);
1990
1991 dprintk("%s: 1 dmacsr: %08x, dmareg: %08x, res: %08x [%d], "
1992 "i: %d.%d.%d.%d, u: %d.%d.%d.%d.\n",
1993 dev->name, dmacsr, dev->dmareg, dmacsr & dev->dmareg, dma->cmdi,
1994 dma->cmdu, dma->srcu, dma->dstu, dma->resu,
1995 dma->cmdi, dma->srci, dma->dsti, dma->resi);
1996
1997 if ((dmacsr & dev->dmareg) == 0)
1998 return IRQ_NONE;
1999
2000 hifn_write_1(dev, HIFN_1_DMA_CSR, dmacsr & dev->dmareg);
2001
2002 if (dmacsr & HIFN_DMACSR_ENGINE)
2003 hifn_write_0(dev, HIFN_0_PUISR, hifn_read_0(dev, HIFN_0_PUISR));
2004 if (dmacsr & HIFN_DMACSR_PUBDONE)
2005 hifn_write_1(dev, HIFN_1_PUB_STATUS,
2006 hifn_read_1(dev, HIFN_1_PUB_STATUS) | HIFN_PUBSTS_DONE);
2007
2008 restart = dmacsr & (HIFN_DMACSR_R_OVER | HIFN_DMACSR_D_OVER);
2009 if (restart) {
2010 u32 puisr = hifn_read_0(dev, HIFN_0_PUISR);
2011
2012 if (printk_ratelimit())
2013 printk("%s: overflow: r: %d, d: %d, puisr: %08x, d: %u.\n",
2014 dev->name, !!(dmacsr & HIFN_DMACSR_R_OVER),
2015 !!(dmacsr & HIFN_DMACSR_D_OVER),
2016 puisr, !!(puisr & HIFN_PUISR_DSTOVER));
2017 if (!!(puisr & HIFN_PUISR_DSTOVER))
2018 hifn_write_0(dev, HIFN_0_PUISR, HIFN_PUISR_DSTOVER);
2019 hifn_write_1(dev, HIFN_1_DMA_CSR, dmacsr & (HIFN_DMACSR_R_OVER |
2020 HIFN_DMACSR_D_OVER));
2021 }
2022
2023 restart = dmacsr & (HIFN_DMACSR_C_ABORT | HIFN_DMACSR_S_ABORT |
2024 HIFN_DMACSR_D_ABORT | HIFN_DMACSR_R_ABORT);
2025 if (restart) {
2026 if (printk_ratelimit())
2027 printk("%s: abort: c: %d, s: %d, d: %d, r: %d.\n",
2028 dev->name, !!(dmacsr & HIFN_DMACSR_C_ABORT),
2029 !!(dmacsr & HIFN_DMACSR_S_ABORT),
2030 !!(dmacsr & HIFN_DMACSR_D_ABORT),
2031 !!(dmacsr & HIFN_DMACSR_R_ABORT));
2032 hifn_reset_dma(dev, 1);
2033 hifn_init_dma(dev);
2034 hifn_init_registers(dev);
2035 }
2036
2037 if ((dmacsr & HIFN_DMACSR_C_WAIT) && (dma->cmdu == 0)) {
2038 dprintk("%s: wait on command.\n", dev->name);
2039 dev->dmareg &= ~(HIFN_DMAIER_C_WAIT);
2040 hifn_write_1(dev, HIFN_1_DMA_IER, dev->dmareg);
2041 }
2042
2043 tasklet_schedule(&dev->tasklet);
2044 hifn_clear_rings(dev);
2045
2046 return IRQ_HANDLED;
2047}
2048
2049static void hifn_flush(struct hifn_device *dev)
2050{
2051 unsigned long flags;
2052 struct crypto_async_request *async_req;
2053 struct hifn_context *ctx;
2054 struct ablkcipher_request *req;
2055 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
2056 int i;
2057
2058 spin_lock_irqsave(&dev->lock, flags);
2059 for (i=0; i<HIFN_D_RES_RSIZE; ++i) {
2060 struct hifn_desc *d = &dma->resr[i];
2061
2062 if (dev->sa[i]) {
2063 hifn_process_ready(dev->sa[i],
2064 (d->l & __cpu_to_le32(HIFN_D_VALID))?-ENODEV:0);
2065 }
2066 }
2067
2068 while ((async_req = crypto_dequeue_request(&dev->queue))) {
2069 ctx = crypto_tfm_ctx(async_req->tfm);
2070 req = container_of(async_req, struct ablkcipher_request, base);
2071
2072 hifn_process_ready(req, -ENODEV);
2073 }
2074 spin_unlock_irqrestore(&dev->lock, flags);
2075}
2076
2077static int hifn_setkey(struct crypto_ablkcipher *cipher, const u8 *key,
2078 unsigned int len)
2079{
2080 struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
2081 struct hifn_context *ctx = crypto_tfm_ctx(tfm);
2082 struct hifn_device *dev = ctx->dev;
2083
2084 if (len > HIFN_MAX_CRYPT_KEY_LENGTH) {
2085 crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
2086		return -EINVAL;
2087 }
2088
2089 if (len == HIFN_DES_KEY_LENGTH) {
2090 u32 tmp[DES_EXPKEY_WORDS];
2091 int ret = des_ekey(tmp, key);
2092
2093 if (unlikely(ret == 0) && (tfm->crt_flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
2094 tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
2095 return -EINVAL;
2096 }
2097 }
2098
2099 dev->flags &= ~HIFN_FLAG_OLD_KEY;
2100
2101 memcpy(ctx->key, key, len);
2102 ctx->keysize = len;
2103
2104 return 0;
2105}
2106
2107static int hifn_handle_req(struct ablkcipher_request *req)
2108{
2109 struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm);
2110 struct hifn_device *dev = ctx->dev;
2111 int err = -EAGAIN;
2112
2113 if (dev->started + DIV_ROUND_UP(req->nbytes, PAGE_SIZE) <= HIFN_QUEUE_LENGTH)
2114 err = hifn_setup_session(req);
2115
2116 if (err == -EAGAIN) {
2117 unsigned long flags;
2118
2119 spin_lock_irqsave(&dev->lock, flags);
2120 err = ablkcipher_enqueue_request(&dev->queue, req);
2121 spin_unlock_irqrestore(&dev->lock, flags);
2122 }
2123
2124 return err;
2125}
2126
2127static int hifn_setup_crypto_req(struct ablkcipher_request *req, u8 op,
2128 u8 type, u8 mode)
2129{
2130 struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm);
2131 unsigned ivsize;
2132
2133 ivsize = crypto_ablkcipher_ivsize(crypto_ablkcipher_reqtfm(req));
2134
2135 if (req->info && mode != ACRYPTO_MODE_ECB) {
2136 if (type == ACRYPTO_TYPE_AES_128)
2137 ivsize = HIFN_AES_IV_LENGTH;
2138 else if (type == ACRYPTO_TYPE_DES)
2139 ivsize = HIFN_DES_KEY_LENGTH;
2140 else if (type == ACRYPTO_TYPE_3DES)
2141 ivsize = HIFN_3DES_KEY_LENGTH;
2142 }
2143
2144 if (ctx->keysize != 16 && type == ACRYPTO_TYPE_AES_128) {
2145 if (ctx->keysize == 24)
2146 type = ACRYPTO_TYPE_AES_192;
2147 else if (ctx->keysize == 32)
2148 type = ACRYPTO_TYPE_AES_256;
2149 }
2150
2151 ctx->op = op;
2152 ctx->mode = mode;
2153 ctx->type = type;
2154 ctx->iv = req->info;
2155 ctx->ivsize = ivsize;
2156
2157 /*
2158 * HEAVY TODO: needs to kick Herbert XU to write documentation.
2159 * HEAVY TODO: needs to kick Herbert XU to write documentation.
2160 * HEAVY TODO: needs to kick Herbert XU to write documentation.
2161 */
2162
2163 return hifn_handle_req(req);
2164}
2165
2166static int hifn_process_queue(struct hifn_device *dev)
2167{
2168 struct crypto_async_request *async_req;
2169 struct hifn_context *ctx;
2170 struct ablkcipher_request *req;
2171 unsigned long flags;
2172 int err = 0;
2173
2174 while (dev->started < HIFN_QUEUE_LENGTH) {
2175 spin_lock_irqsave(&dev->lock, flags);
2176 async_req = crypto_dequeue_request(&dev->queue);
2177 spin_unlock_irqrestore(&dev->lock, flags);
2178
2179 if (!async_req)
2180 break;
2181
2182 ctx = crypto_tfm_ctx(async_req->tfm);
2183 req = container_of(async_req, struct ablkcipher_request, base);
2184
2185 err = hifn_handle_req(req);
2186 if (err)
2187 break;
2188 }
2189
2190 return err;
2191}
2192
2193static int hifn_setup_crypto(struct ablkcipher_request *req, u8 op,
2194 u8 type, u8 mode)
2195{
2196 int err;
2197 struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm);
2198 struct hifn_device *dev = ctx->dev;
2199
2200 err = hifn_setup_crypto_req(req, op, type, mode);
2201 if (err)
2202 return err;
2203
2204 if (dev->started < HIFN_QUEUE_LENGTH && dev->queue.qlen)
2205 err = hifn_process_queue(dev);
2206
2207 return err;
2208}
2209
2210/*
2211 * AES encryption functions.
2212 */
2213static inline int hifn_encrypt_aes_ecb(struct ablkcipher_request *req)
2214{
2215 return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
2216 ACRYPTO_TYPE_AES_128, ACRYPTO_MODE_ECB);
2217}
2218static inline int hifn_encrypt_aes_cbc(struct ablkcipher_request *req)
2219{
2220 return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
2221 ACRYPTO_TYPE_AES_128, ACRYPTO_MODE_CBC);
2222}
2223static inline int hifn_encrypt_aes_cfb(struct ablkcipher_request *req)
2224{
2225 return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
2226 ACRYPTO_TYPE_AES_128, ACRYPTO_MODE_CFB);
2227}
2228static inline int hifn_encrypt_aes_ofb(struct ablkcipher_request *req)
2229{
2230 return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
2231 ACRYPTO_TYPE_AES_128, ACRYPTO_MODE_OFB);
2232}
2233
2234/*
2235 * AES decryption functions.
2236 */
2237static inline int hifn_decrypt_aes_ecb(struct ablkcipher_request *req)
2238{
2239 return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
2240 ACRYPTO_TYPE_AES_128, ACRYPTO_MODE_ECB);
2241}
2242static inline int hifn_decrypt_aes_cbc(struct ablkcipher_request *req)
2243{
2244 return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
2245 ACRYPTO_TYPE_AES_128, ACRYPTO_MODE_CBC);
2246}
2247static inline int hifn_decrypt_aes_cfb(struct ablkcipher_request *req)
2248{
2249 return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
2250 ACRYPTO_TYPE_AES_128, ACRYPTO_MODE_CFB);
2251}
2252static inline int hifn_decrypt_aes_ofb(struct ablkcipher_request *req)
2253{
2254 return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
2255 ACRYPTO_TYPE_AES_128, ACRYPTO_MODE_OFB);
2256}
2257
2258/*
2259 * DES encryption functions.
2260 */
2261static inline int hifn_encrypt_des_ecb(struct ablkcipher_request *req)
2262{
2263 return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
2264 ACRYPTO_TYPE_DES, ACRYPTO_MODE_ECB);
2265}
2266static inline int hifn_encrypt_des_cbc(struct ablkcipher_request *req)
2267{
2268 return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
2269 ACRYPTO_TYPE_DES, ACRYPTO_MODE_CBC);
2270}
2271static inline int hifn_encrypt_des_cfb(struct ablkcipher_request *req)
2272{
2273 return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
2274 ACRYPTO_TYPE_DES, ACRYPTO_MODE_CFB);
2275}
2276static inline int hifn_encrypt_des_ofb(struct ablkcipher_request *req)
2277{
2278 return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
2279 ACRYPTO_TYPE_DES, ACRYPTO_MODE_OFB);
2280}
2281
2282/*
2283 * DES decryption functions.
2284 */
2285static inline int hifn_decrypt_des_ecb(struct ablkcipher_request *req)
2286{
2287 return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
2288 ACRYPTO_TYPE_DES, ACRYPTO_MODE_ECB);
2289}
2290static inline int hifn_decrypt_des_cbc(struct ablkcipher_request *req)
2291{
2292 return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
2293 ACRYPTO_TYPE_DES, ACRYPTO_MODE_CBC);
2294}
2295static inline int hifn_decrypt_des_cfb(struct ablkcipher_request *req)
2296{
2297 return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
2298 ACRYPTO_TYPE_DES, ACRYPTO_MODE_CFB);
2299}
2300static inline int hifn_decrypt_des_ofb(struct ablkcipher_request *req)
2301{
2302 return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
2303 ACRYPTO_TYPE_DES, ACRYPTO_MODE_OFB);
2304}
2305
2306/*
2307 * 3DES encryption functions.
2308 */
2309static inline int hifn_encrypt_3des_ecb(struct ablkcipher_request *req)
2310{
2311 return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
2312 ACRYPTO_TYPE_3DES, ACRYPTO_MODE_ECB);
2313}
2314static inline int hifn_encrypt_3des_cbc(struct ablkcipher_request *req)
2315{
2316 return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
2317 ACRYPTO_TYPE_3DES, ACRYPTO_MODE_CBC);
2318}
2319static inline int hifn_encrypt_3des_cfb(struct ablkcipher_request *req)
2320{
2321 return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
2322 ACRYPTO_TYPE_3DES, ACRYPTO_MODE_CFB);
2323}
2324static inline int hifn_encrypt_3des_ofb(struct ablkcipher_request *req)
2325{
2326 return hifn_setup_crypto(req, ACRYPTO_OP_ENCRYPT,
2327 ACRYPTO_TYPE_3DES, ACRYPTO_MODE_OFB);
2328}
2329
2330/*
2331 * 3DES decryption functions.
2332 */
2333static inline int hifn_decrypt_3des_ecb(struct ablkcipher_request *req)
2334{
2335 return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
2336 ACRYPTO_TYPE_3DES, ACRYPTO_MODE_ECB);
2337}
2338static inline int hifn_decrypt_3des_cbc(struct ablkcipher_request *req)
2339{
2340 return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
2341 ACRYPTO_TYPE_3DES, ACRYPTO_MODE_CBC);
2342}
2343static inline int hifn_decrypt_3des_cfb(struct ablkcipher_request *req)
2344{
2345 return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
2346 ACRYPTO_TYPE_3DES, ACRYPTO_MODE_CFB);
2347}
2348static inline int hifn_decrypt_3des_ofb(struct ablkcipher_request *req)
2349{
2350 return hifn_setup_crypto(req, ACRYPTO_OP_DECRYPT,
2351 ACRYPTO_TYPE_3DES, ACRYPTO_MODE_OFB);
2352}
2353
2354struct hifn_alg_template
2355{
2356 char name[CRYPTO_MAX_ALG_NAME];
2357 char drv_name[CRYPTO_MAX_ALG_NAME];
2358 unsigned int bsize;
2359 struct ablkcipher_alg ablkcipher;
2360};
2361
2362static struct hifn_alg_template hifn_alg_templates[] = {
2363 /*
2364 * 3DES ECB, CBC, CFB and OFB modes.
2365 */
2366 {
2367 .name = "cfb(des3_ede)", .drv_name = "hifn-3des", .bsize = 8,
2368 .ablkcipher = {
2369 .min_keysize = HIFN_3DES_KEY_LENGTH,
2370 .max_keysize = HIFN_3DES_KEY_LENGTH,
2371 .setkey = hifn_setkey,
2372 .encrypt = hifn_encrypt_3des_cfb,
2373 .decrypt = hifn_decrypt_3des_cfb,
2374 },
2375 },
2376 {
2377 .name = "ofb(des3_ede)", .drv_name = "hifn-3des", .bsize = 8,
2378 .ablkcipher = {
2379 .min_keysize = HIFN_3DES_KEY_LENGTH,
2380 .max_keysize = HIFN_3DES_KEY_LENGTH,
2381 .setkey = hifn_setkey,
2382 .encrypt = hifn_encrypt_3des_ofb,
2383 .decrypt = hifn_decrypt_3des_ofb,
2384 },
2385 },
2386 {
2387 .name = "cbc(des3_ede)", .drv_name = "hifn-3des", .bsize = 8,
2388 .ablkcipher = {
2389 .min_keysize = HIFN_3DES_KEY_LENGTH,
2390 .max_keysize = HIFN_3DES_KEY_LENGTH,
2391 .setkey = hifn_setkey,
2392 .encrypt = hifn_encrypt_3des_cbc,
2393 .decrypt = hifn_decrypt_3des_cbc,
2394 },
2395 },
2396 {
2397 .name = "ecb(des3_ede)", .drv_name = "hifn-3des", .bsize = 8,
2398 .ablkcipher = {
2399 .min_keysize = HIFN_3DES_KEY_LENGTH,
2400 .max_keysize = HIFN_3DES_KEY_LENGTH,
2401 .setkey = hifn_setkey,
2402 .encrypt = hifn_encrypt_3des_ecb,
2403 .decrypt = hifn_decrypt_3des_ecb,
2404 },
2405 },
2406
2407 /*
2408 * DES ECB, CBC, CFB and OFB modes.
2409 */
2410 {
2411 .name = "cfb(des)", .drv_name = "hifn-des", .bsize = 8,
2412 .ablkcipher = {
2413 .min_keysize = HIFN_DES_KEY_LENGTH,
2414 .max_keysize = HIFN_DES_KEY_LENGTH,
2415 .setkey = hifn_setkey,
2416 .encrypt = hifn_encrypt_des_cfb,
2417 .decrypt = hifn_decrypt_des_cfb,
2418 },
2419 },
2420 {
2421 .name = "ofb(des)", .drv_name = "hifn-des", .bsize = 8,
2422 .ablkcipher = {
2423 .min_keysize = HIFN_DES_KEY_LENGTH,
2424 .max_keysize = HIFN_DES_KEY_LENGTH,
2425 .setkey = hifn_setkey,
2426 .encrypt = hifn_encrypt_des_ofb,
2427 .decrypt = hifn_decrypt_des_ofb,
2428 },
2429 },
2430 {
2431 .name = "cbc(des)", .drv_name = "hifn-des", .bsize = 8,
2432 .ablkcipher = {
2433 .min_keysize = HIFN_DES_KEY_LENGTH,
2434 .max_keysize = HIFN_DES_KEY_LENGTH,
2435 .setkey = hifn_setkey,
2436 .encrypt = hifn_encrypt_des_cbc,
2437 .decrypt = hifn_decrypt_des_cbc,
2438 },
2439 },
2440 {
2441 .name = "ecb(des)", .drv_name = "hifn-des", .bsize = 8,
2442 .ablkcipher = {
2443 .min_keysize = HIFN_DES_KEY_LENGTH,
2444 .max_keysize = HIFN_DES_KEY_LENGTH,
2445 .setkey = hifn_setkey,
2446 .encrypt = hifn_encrypt_des_ecb,
2447 .decrypt = hifn_decrypt_des_ecb,
2448 },
2449 },
2450
2451 /*
2452 * AES ECB, CBC, CFB and OFB modes.
2453 */
2454 {
2455 .name = "ecb(aes)", .drv_name = "hifn-aes", .bsize = 16,
2456 .ablkcipher = {
2457 .min_keysize = AES_MIN_KEY_SIZE,
2458 .max_keysize = AES_MAX_KEY_SIZE,
2459 .setkey = hifn_setkey,
2460 .encrypt = hifn_encrypt_aes_ecb,
2461 .decrypt = hifn_decrypt_aes_ecb,
2462 },
2463 },
2464 {
2465 .name = "cbc(aes)", .drv_name = "hifn-aes", .bsize = 16,
2466 .ablkcipher = {
2467 .min_keysize = AES_MIN_KEY_SIZE,
2468 .max_keysize = AES_MAX_KEY_SIZE,
2469 .setkey = hifn_setkey,
2470 .encrypt = hifn_encrypt_aes_cbc,
2471 .decrypt = hifn_decrypt_aes_cbc,
2472 },
2473 },
2474 {
2475 .name = "cfb(aes)", .drv_name = "hifn-aes", .bsize = 16,
2476 .ablkcipher = {
2477 .min_keysize = AES_MIN_KEY_SIZE,
2478 .max_keysize = AES_MAX_KEY_SIZE,
2479 .setkey = hifn_setkey,
2480 .encrypt = hifn_encrypt_aes_cfb,
2481 .decrypt = hifn_decrypt_aes_cfb,
2482 },
2483 },
2484 {
2485 .name = "ofb(aes)", .drv_name = "hifn-aes", .bsize = 16,
2486 .ablkcipher = {
2487 .min_keysize = AES_MIN_KEY_SIZE,
2488 .max_keysize = AES_MAX_KEY_SIZE,
2489 .setkey = hifn_setkey,
2490 .encrypt = hifn_encrypt_aes_ofb,
2491 .decrypt = hifn_decrypt_aes_ofb,
2492 },
2493 },
2494};
2495
2496static int hifn_cra_init(struct crypto_tfm *tfm)
2497{
2498 struct crypto_alg *alg = tfm->__crt_alg;
2499 struct hifn_crypto_alg *ha = crypto_alg_to_hifn(alg);
2500 struct hifn_context *ctx = crypto_tfm_ctx(tfm);
2501
2502 ctx->dev = ha->dev;
2503
2504 return 0;
2505}
2506
2507static int hifn_alg_alloc(struct hifn_device *dev, struct hifn_alg_template *t)
2508{
2509 struct hifn_crypto_alg *alg;
2510 int err;
2511
2512 alg = kzalloc(sizeof(struct hifn_crypto_alg), GFP_KERNEL);
2513 if (!alg)
2514 return -ENOMEM;
2515
2516 snprintf(alg->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s", t->name);
2517 snprintf(alg->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s", t->drv_name);
2518
2519 alg->alg.cra_priority = 300;
2520 alg->alg.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
2521 alg->alg.cra_blocksize = t->bsize;
2522 alg->alg.cra_ctxsize = sizeof(struct hifn_context);
2523 alg->alg.cra_alignmask = 15;
2524 if (t->bsize == 8)
2525 alg->alg.cra_alignmask = 3;
2526 alg->alg.cra_type = &crypto_ablkcipher_type;
2527 alg->alg.cra_module = THIS_MODULE;
2528 alg->alg.cra_u.ablkcipher = t->ablkcipher;
2529 alg->alg.cra_init = hifn_cra_init;
2530
2531 alg->dev = dev;
2532
2533 list_add_tail(&alg->entry, &dev->alg_list);
2534
2535 err = crypto_register_alg(&alg->alg);
2536 if (err) {
2537 list_del(&alg->entry);
2538 kfree(alg);
2539 }
2540
2541 return err;
2542}
2543
2544static void hifn_unregister_alg(struct hifn_device *dev)
2545{
2546 struct hifn_crypto_alg *a, *n;
2547
2548 list_for_each_entry_safe(a, n, &dev->alg_list, entry) {
2549 list_del(&a->entry);
2550 crypto_unregister_alg(&a->alg);
2551 kfree(a);
2552 }
2553}
2554
2555static int hifn_register_alg(struct hifn_device *dev)
2556{
2557 int i, err;
2558
2559 for (i=0; i<ARRAY_SIZE(hifn_alg_templates); ++i) {
2560 err = hifn_alg_alloc(dev, &hifn_alg_templates[i]);
2561 if (err)
2562 goto err_out_exit;
2563 }
2564
2565 return 0;
2566
2567err_out_exit:
2568 hifn_unregister_alg(dev);
2569 return err;
2570}
2571
2572static void hifn_tasklet_callback(unsigned long data)
2573{
2574 struct hifn_device *dev = (struct hifn_device *)data;
2575
2576 /*
2577	 * It is ok to call this without the lock held: although it
2578	 * modifies some parameters that are also used in parallel
2579	 * (like dev->success), they are either used in process
2580	 * context or updated atomically (like setting dev->sa[i] to NULL).
2581 */
2582 hifn_check_for_completion(dev, 0);
2583}
2584
2585static int hifn_probe(struct pci_dev *pdev, const struct pci_device_id *id)
2586{
2587 int err, i;
2588 struct hifn_device *dev;
2589 char name[8];
2590
2591 err = pci_enable_device(pdev);
2592 if (err)
2593 return err;
2594 pci_set_master(pdev);
2595
2596 err = pci_set_dma_mask(pdev, DMA_32BIT_MASK);
2597 if (err)
2598 goto err_out_disable_pci_device;
2599
2600 snprintf(name, sizeof(name), "hifn%d",
2601 atomic_inc_return(&hifn_dev_number)-1);
2602
2603 err = pci_request_regions(pdev, name);
2604 if (err)
2605 goto err_out_disable_pci_device;
2606
2607 if (pci_resource_len(pdev, 0) < HIFN_BAR0_SIZE ||
2608 pci_resource_len(pdev, 1) < HIFN_BAR1_SIZE ||
2609 pci_resource_len(pdev, 2) < HIFN_BAR2_SIZE) {
2610 dprintk("%s: Broken hardware - I/O regions are too small.\n",
2611 pci_name(pdev));
2612 err = -ENODEV;
2613 goto err_out_free_regions;
2614 }
2615
2616 dev = kzalloc(sizeof(struct hifn_device) + sizeof(struct crypto_alg),
2617 GFP_KERNEL);
2618 if (!dev) {
2619 err = -ENOMEM;
2620 goto err_out_free_regions;
2621 }
2622
2623 INIT_LIST_HEAD(&dev->alg_list);
2624
2625 snprintf(dev->name, sizeof(dev->name), "%s", name);
2626 spin_lock_init(&dev->lock);
2627
2628 for (i=0; i<3; ++i) {
2629 unsigned long addr, size;
2630
2631 addr = pci_resource_start(pdev, i);
2632 size = pci_resource_len(pdev, i);
2633
2634 dev->bar[i] = ioremap_nocache(addr, size);
2635 if (!dev->bar[i])
2636 goto err_out_unmap_bars;
2637 }
2638
2639 dev->result_mem = __get_free_pages(GFP_KERNEL, HIFN_MAX_RESULT_ORDER);
2640 if (!dev->result_mem) {
2641 dprintk("Failed to allocate %d pages for result_mem.\n",
2642 HIFN_MAX_RESULT_ORDER);
2643 goto err_out_unmap_bars;
2644 }
2645 memset((void *)dev->result_mem, 0, PAGE_SIZE*(1<<HIFN_MAX_RESULT_ORDER));
2646
2647 dev->dst = pci_map_single(pdev, (void *)dev->result_mem,
2648 PAGE_SIZE << HIFN_MAX_RESULT_ORDER, PCI_DMA_FROMDEVICE);
2649
2650 dev->desc_virt = pci_alloc_consistent(pdev, sizeof(struct hifn_dma),
2651 &dev->desc_dma);
2652 if (!dev->desc_virt) {
2653 dprintk("Failed to allocate descriptor rings.\n");
2654 goto err_out_free_result_pages;
2655 }
2656 memset(dev->desc_virt, 0, sizeof(struct hifn_dma));
2657
2658 dev->pdev = pdev;
2659 dev->irq = pdev->irq;
2660
2661 for (i=0; i<HIFN_D_RES_RSIZE; ++i)
2662 dev->sa[i] = NULL;
2663
2664 pci_set_drvdata(pdev, dev);
2665
2666 tasklet_init(&dev->tasklet, hifn_tasklet_callback, (unsigned long)dev);
2667
2668 crypto_init_queue(&dev->queue, 1);
2669
2670 err = request_irq(dev->irq, hifn_interrupt, IRQF_SHARED, dev->name, dev);
2671 if (err) {
2672 dprintk("Failed to request IRQ%d: err: %d.\n", dev->irq, err);
2673 dev->irq = 0;
2674 goto err_out_free_desc;
2675 }
2676
2677 err = hifn_start_device(dev);
2678 if (err)
2679 goto err_out_free_irq;
2680
2681 err = hifn_test(dev, 1, 0);
2682 if (err)
2683 goto err_out_stop_device;
2684
2685 err = hifn_register_rng(dev);
2686 if (err)
2687 goto err_out_stop_device;
2688
2689 err = hifn_register_alg(dev);
2690 if (err)
2691 goto err_out_unregister_rng;
2692
2693 INIT_DELAYED_WORK(&dev->work, hifn_work);
2694 schedule_delayed_work(&dev->work, HZ);
2695
2696 dprintk("HIFN crypto accelerator card at %s has been "
2697 "successfully registered as %s.\n",
2698 pci_name(pdev), dev->name);
2699
2700 return 0;
2701
2702err_out_unregister_rng:
2703 hifn_unregister_rng(dev);
2704err_out_stop_device:
2705 hifn_reset_dma(dev, 1);
2706 hifn_stop_device(dev);
2707err_out_free_irq:
2708	free_irq(dev->irq, dev);
2709 tasklet_kill(&dev->tasklet);
2710err_out_free_desc:
2711 pci_free_consistent(pdev, sizeof(struct hifn_dma),
2712 dev->desc_virt, dev->desc_dma);
2713
2714err_out_free_result_pages:
2715 pci_unmap_single(pdev, dev->dst, PAGE_SIZE << HIFN_MAX_RESULT_ORDER,
2716 PCI_DMA_FROMDEVICE);
2717 free_pages(dev->result_mem, HIFN_MAX_RESULT_ORDER);
2718
2719err_out_unmap_bars:
2720 for (i=0; i<3; ++i)
2721 if (dev->bar[i])
2722 iounmap(dev->bar[i]);
2723
2724err_out_free_regions:
2725 pci_release_regions(pdev);
2726
2727err_out_disable_pci_device:
2728 pci_disable_device(pdev);
2729
2730 return err;
2731}
2732
2733static void hifn_remove(struct pci_dev *pdev)
2734{
2735 int i;
2736 struct hifn_device *dev;
2737
2738 dev = pci_get_drvdata(pdev);
2739
2740 if (dev) {
2741 cancel_delayed_work(&dev->work);
2742 flush_scheduled_work();
2743
2744 hifn_unregister_rng(dev);
2745 hifn_unregister_alg(dev);
2746 hifn_reset_dma(dev, 1);
2747 hifn_stop_device(dev);
2748
2749		free_irq(dev->irq, dev);
2750 tasklet_kill(&dev->tasklet);
2751
2752 hifn_flush(dev);
2753
2754 pci_free_consistent(pdev, sizeof(struct hifn_dma),
2755 dev->desc_virt, dev->desc_dma);
2756 pci_unmap_single(pdev, dev->dst,
2757 PAGE_SIZE << HIFN_MAX_RESULT_ORDER,
2758 PCI_DMA_FROMDEVICE);
2759 free_pages(dev->result_mem, HIFN_MAX_RESULT_ORDER);
2760 for (i=0; i<3; ++i)
2761 if (dev->bar[i])
2762 iounmap(dev->bar[i]);
2763
2764 kfree(dev);
2765 }
2766
2767 pci_release_regions(pdev);
2768 pci_disable_device(pdev);
2769}
2770
2771static struct pci_device_id hifn_pci_tbl[] = {
2772 { PCI_DEVICE(PCI_VENDOR_ID_HIFN, PCI_DEVICE_ID_HIFN_7955) },
2773 { PCI_DEVICE(PCI_VENDOR_ID_HIFN, PCI_DEVICE_ID_HIFN_7956) },
2774 { 0 }
2775};
2776MODULE_DEVICE_TABLE(pci, hifn_pci_tbl);
2777
2778static struct pci_driver hifn_pci_driver = {
2779 .name = "hifn795x",
2780 .id_table = hifn_pci_tbl,
2781 .probe = hifn_probe,
2782 .remove = __devexit_p(hifn_remove),
2783};
2784
2785static int __init hifn_init(void)
2786{
2787 unsigned int freq;
2788 int err;
2789
2790 if (strncmp(hifn_pll_ref, "ext", 3) &&
2791 strncmp(hifn_pll_ref, "pci", 3)) {
2792 printk(KERN_ERR "hifn795x: invalid hifn_pll_ref clock, "
2793				"must be pci or ext\n");
2794 return -EINVAL;
2795 }
2796
2797 /*
2798 * For the 7955/7956 the reference clock frequency must be in the
2799 * range of 20MHz-100MHz. For the 7954 the upper bound is 66.67MHz,
2800 * but this chip is currently not supported.
2801 */
2802 if (hifn_pll_ref[3] != '\0') {
2803 freq = simple_strtoul(hifn_pll_ref + 3, NULL, 10);
2804 if (freq < 20 || freq > 100) {
2805 printk(KERN_ERR "hifn795x: invalid hifn_pll_ref "
2806 "frequency, must be in the range "
2807					"of 20-100 MHz\n");
2808 return -EINVAL;
2809 }
2810 }
2811
2812 err = pci_register_driver(&hifn_pci_driver);
2813 if (err < 0) {
2814 dprintk("Failed to register PCI driver for %s device.\n",
2815 hifn_pci_driver.name);
2816 return -ENODEV;
2817 }
2818
2819 printk(KERN_INFO "Driver for HIFN 795x crypto accelerator chip "
2820 "has been successfully registered.\n");
2821
2822 return 0;
2823}
2824
2825static void __exit hifn_fini(void)
2826{
2827 pci_unregister_driver(&hifn_pci_driver);
2828
2829 printk(KERN_INFO "Driver for HIFN 795x crypto accelerator chip "
2830 "has been successfully unregistered.\n");
2831}
2832
2833module_init(hifn_init);
2834module_exit(hifn_fini);
2835
2836MODULE_LICENSE("GPL");
2837MODULE_AUTHOR("Evgeniy Polyakov <johnpol@2ka.mipt.ru>");
2838MODULE_DESCRIPTION("Driver for HIFN 795x crypto accelerator chip.");
diff --git a/drivers/crypto/padlock-aes.c b/drivers/crypto/padlock-aes.c
index 5f7e71810489..2f3ad3f7dfea 100644
--- a/drivers/crypto/padlock-aes.c
+++ b/drivers/crypto/padlock-aes.c
@@ -44,6 +44,7 @@
44 */ 44 */
45 45
46#include <crypto/algapi.h> 46#include <crypto/algapi.h>
47#include <crypto/aes.h>
47#include <linux/module.h> 48#include <linux/module.h>
48#include <linux/init.h> 49#include <linux/init.h>
49#include <linux/types.h> 50#include <linux/types.h>
@@ -53,9 +54,6 @@
53#include <asm/byteorder.h> 54#include <asm/byteorder.h>
54#include "padlock.h" 55#include "padlock.h"
55 56
56#define AES_MIN_KEY_SIZE 16 /* in uint8_t units */
57#define AES_MAX_KEY_SIZE 32 /* ditto */
58#define AES_BLOCK_SIZE 16 /* ditto */
59#define AES_EXTENDED_KEY_SIZE 64 /* in uint32_t units */ 57#define AES_EXTENDED_KEY_SIZE 64 /* in uint32_t units */
60#define AES_EXTENDED_KEY_SIZE_B (AES_EXTENDED_KEY_SIZE * sizeof(uint32_t)) 58#define AES_EXTENDED_KEY_SIZE_B (AES_EXTENDED_KEY_SIZE * sizeof(uint32_t))
61 59
@@ -419,6 +417,11 @@ static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
419/* ====== Encryption/decryption routines ====== */ 417/* ====== Encryption/decryption routines ====== */
420 418
421/* These are the real call to PadLock. */ 419/* These are the real call to PadLock. */
420static inline void padlock_reset_key(void)
421{
422 asm volatile ("pushfl; popfl");
423}
424
422static inline void padlock_xcrypt(const u8 *input, u8 *output, void *key, 425static inline void padlock_xcrypt(const u8 *input, u8 *output, void *key,
423 void *control_word) 426 void *control_word)
424{ 427{
@@ -439,8 +442,6 @@ static void aes_crypt_copy(const u8 *in, u8 *out, u32 *key, struct cword *cword)
439static inline void aes_crypt(const u8 *in, u8 *out, u32 *key, 442static inline void aes_crypt(const u8 *in, u8 *out, u32 *key,
440 struct cword *cword) 443 struct cword *cword)
441{ 444{
442 asm volatile ("pushfl; popfl");
443
444 /* padlock_xcrypt requires at least two blocks of data. */ 445 /* padlock_xcrypt requires at least two blocks of data. */
445 if (unlikely(!(((unsigned long)in ^ (PAGE_SIZE - AES_BLOCK_SIZE)) & 446 if (unlikely(!(((unsigned long)in ^ (PAGE_SIZE - AES_BLOCK_SIZE)) &
446 (PAGE_SIZE - 1)))) { 447 (PAGE_SIZE - 1)))) {
@@ -459,7 +460,6 @@ static inline void padlock_xcrypt_ecb(const u8 *input, u8 *output, void *key,
459 return; 460 return;
460 } 461 }
461 462
462 asm volatile ("pushfl; popfl"); /* enforce key reload. */
463 asm volatile ("test $1, %%cl;" 463 asm volatile ("test $1, %%cl;"
464 "je 1f;" 464 "je 1f;"
465 "lea -1(%%ecx), %%eax;" 465 "lea -1(%%ecx), %%eax;"
@@ -476,8 +476,6 @@ static inline void padlock_xcrypt_ecb(const u8 *input, u8 *output, void *key,
476static inline u8 *padlock_xcrypt_cbc(const u8 *input, u8 *output, void *key, 476static inline u8 *padlock_xcrypt_cbc(const u8 *input, u8 *output, void *key,
477 u8 *iv, void *control_word, u32 count) 477 u8 *iv, void *control_word, u32 count)
478{ 478{
479 /* Enforce key reload. */
480 asm volatile ("pushfl; popfl");
481 /* rep xcryptcbc */ 479 /* rep xcryptcbc */
482 asm volatile (".byte 0xf3,0x0f,0xa7,0xd0" 480 asm volatile (".byte 0xf3,0x0f,0xa7,0xd0"
483 : "+S" (input), "+D" (output), "+a" (iv) 481 : "+S" (input), "+D" (output), "+a" (iv)
@@ -488,12 +486,14 @@ static inline u8 *padlock_xcrypt_cbc(const u8 *input, u8 *output, void *key,
488static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) 486static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
489{ 487{
490 struct aes_ctx *ctx = aes_ctx(tfm); 488 struct aes_ctx *ctx = aes_ctx(tfm);
489 padlock_reset_key();
491 aes_crypt(in, out, ctx->E, &ctx->cword.encrypt); 490 aes_crypt(in, out, ctx->E, &ctx->cword.encrypt);
492} 491}
493 492
494static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) 493static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
495{ 494{
496 struct aes_ctx *ctx = aes_ctx(tfm); 495 struct aes_ctx *ctx = aes_ctx(tfm);
496 padlock_reset_key();
497 aes_crypt(in, out, ctx->D, &ctx->cword.decrypt); 497 aes_crypt(in, out, ctx->D, &ctx->cword.decrypt);
498} 498}
499 499
@@ -526,6 +526,8 @@ static int ecb_aes_encrypt(struct blkcipher_desc *desc,
526 struct blkcipher_walk walk; 526 struct blkcipher_walk walk;
527 int err; 527 int err;
528 528
529 padlock_reset_key();
530
529 blkcipher_walk_init(&walk, dst, src, nbytes); 531 blkcipher_walk_init(&walk, dst, src, nbytes);
530 err = blkcipher_walk_virt(desc, &walk); 532 err = blkcipher_walk_virt(desc, &walk);
531 533
@@ -548,6 +550,8 @@ static int ecb_aes_decrypt(struct blkcipher_desc *desc,
548 struct blkcipher_walk walk; 550 struct blkcipher_walk walk;
549 int err; 551 int err;
550 552
553 padlock_reset_key();
554
551 blkcipher_walk_init(&walk, dst, src, nbytes); 555 blkcipher_walk_init(&walk, dst, src, nbytes);
552 err = blkcipher_walk_virt(desc, &walk); 556 err = blkcipher_walk_virt(desc, &walk);
553 557
@@ -592,6 +596,8 @@ static int cbc_aes_encrypt(struct blkcipher_desc *desc,
592 struct blkcipher_walk walk; 596 struct blkcipher_walk walk;
593 int err; 597 int err;
594 598
599 padlock_reset_key();
600
595 blkcipher_walk_init(&walk, dst, src, nbytes); 601 blkcipher_walk_init(&walk, dst, src, nbytes);
596 err = blkcipher_walk_virt(desc, &walk); 602 err = blkcipher_walk_virt(desc, &walk);
597 603
@@ -616,6 +622,8 @@ static int cbc_aes_decrypt(struct blkcipher_desc *desc,
616 struct blkcipher_walk walk; 622 struct blkcipher_walk walk;
617 int err; 623 int err;
618 624
625 padlock_reset_key();
626
619 blkcipher_walk_init(&walk, dst, src, nbytes); 627 blkcipher_walk_init(&walk, dst, src, nbytes);
620 err = blkcipher_walk_virt(desc, &walk); 628 err = blkcipher_walk_virt(desc, &walk);
621 629
diff --git a/include/crypto/aead.h b/include/crypto/aead.h
new file mode 100644
index 000000000000..0edf949f6369
--- /dev/null
+++ b/include/crypto/aead.h
@@ -0,0 +1,105 @@
1/*
2 * AEAD: Authenticated Encryption with Associated Data
3 *
4 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License as published by the Free
8 * Software Foundation; either version 2 of the License, or (at your option)
9 * any later version.
10 *
11 */
12
13#ifndef _CRYPTO_AEAD_H
14#define _CRYPTO_AEAD_H
15
16#include <linux/crypto.h>
17#include <linux/kernel.h>
18#include <linux/slab.h>
19
20/**
21 * struct aead_givcrypt_request - AEAD request with IV generation
22 * @seq: Sequence number for IV generation
23 * @giv: Space for generated IV
24 * @areq: The AEAD request itself
25 */
26struct aead_givcrypt_request {
27 u64 seq;
28 u8 *giv;
29
30 struct aead_request areq;
31};
32
33static inline struct crypto_aead *aead_givcrypt_reqtfm(
34 struct aead_givcrypt_request *req)
35{
36 return crypto_aead_reqtfm(&req->areq);
37}
38
39static inline int crypto_aead_givencrypt(struct aead_givcrypt_request *req)
40{
41 struct aead_tfm *crt = crypto_aead_crt(aead_givcrypt_reqtfm(req));
42 return crt->givencrypt(req);
43};
44
45static inline int crypto_aead_givdecrypt(struct aead_givcrypt_request *req)
46{
47 struct aead_tfm *crt = crypto_aead_crt(aead_givcrypt_reqtfm(req));
48 return crt->givdecrypt(req);
49};
50
51static inline void aead_givcrypt_set_tfm(struct aead_givcrypt_request *req,
52 struct crypto_aead *tfm)
53{
54 req->areq.base.tfm = crypto_aead_tfm(tfm);
55}
56
57static inline struct aead_givcrypt_request *aead_givcrypt_alloc(
58 struct crypto_aead *tfm, gfp_t gfp)
59{
60 struct aead_givcrypt_request *req;
61
62 req = kmalloc(sizeof(struct aead_givcrypt_request) +
63 crypto_aead_reqsize(tfm), gfp);
64
65 if (likely(req))
66 aead_givcrypt_set_tfm(req, tfm);
67
68 return req;
69}
70
71static inline void aead_givcrypt_free(struct aead_givcrypt_request *req)
72{
73 kfree(req);
74}
75
76static inline void aead_givcrypt_set_callback(
77 struct aead_givcrypt_request *req, u32 flags,
78 crypto_completion_t complete, void *data)
79{
80 aead_request_set_callback(&req->areq, flags, complete, data);
81}
82
83static inline void aead_givcrypt_set_crypt(struct aead_givcrypt_request *req,
84 struct scatterlist *src,
85 struct scatterlist *dst,
86 unsigned int nbytes, void *iv)
87{
88 aead_request_set_crypt(&req->areq, src, dst, nbytes, iv);
89}
90
91static inline void aead_givcrypt_set_assoc(struct aead_givcrypt_request *req,
92 struct scatterlist *assoc,
93 unsigned int assoclen)
94{
95 aead_request_set_assoc(&req->areq, assoc, assoclen);
96}
97
98static inline void aead_givcrypt_set_giv(struct aead_givcrypt_request *req,
99 u8 *giv, u64 seq)
100{
101 req->giv = giv;
102 req->seq = seq;
103}
104
105#endif /* _CRYPTO_AEAD_H */
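
For illustration only, not part of this patch: the inline helpers above are typically combined along the lines below by a caller that wants the transform to generate the IV for it. Everything outside the aead_givcrypt_*() and crypto_aead_givencrypt() calls (the function names, the completion handler, the flag choice) is a hypothetical sketch.

/* Hypothetical completion handler for the asynchronous case. */
static void example_giv_done(struct crypto_async_request *base, int err)
{
	/* A real caller would finish its own state and free the request here. */
}

static int example_givencrypt(struct crypto_aead *aead,
			      struct scatterlist *assoc, unsigned int assoclen,
			      struct scatterlist *src, struct scatterlist *dst,
			      unsigned int nbytes, u8 *iv, u64 seq)
{
	struct aead_givcrypt_request *req;
	int err;

	req = aead_givcrypt_alloc(aead, GFP_KERNEL);
	if (!req)
		return -ENOMEM;

	aead_givcrypt_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   example_giv_done, NULL);
	aead_givcrypt_set_crypt(req, src, dst, nbytes, iv);
	aead_givcrypt_set_assoc(req, assoc, assoclen);
	aead_givcrypt_set_giv(req, iv, seq);

	err = crypto_aead_givencrypt(req);
	if (err != -EINPROGRESS && err != -EBUSY)
		aead_givcrypt_free(req);

	return err;
}
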
diff --git a/include/crypto/aes.h b/include/crypto/aes.h
new file mode 100644
index 000000000000..d480b76715a8
--- /dev/null
+++ b/include/crypto/aes.h
@@ -0,0 +1,31 @@
1/*
2 * Common values for AES algorithms
3 */
4
5#ifndef _CRYPTO_AES_H
6#define _CRYPTO_AES_H
7
8#include <linux/types.h>
9#include <linux/crypto.h>
10
11#define AES_MIN_KEY_SIZE 16
12#define AES_MAX_KEY_SIZE 32
13#define AES_KEYSIZE_128 16
14#define AES_KEYSIZE_192 24
15#define AES_KEYSIZE_256 32
16#define AES_BLOCK_SIZE 16
17
18struct crypto_aes_ctx {
19 u32 key_length;
20 u32 key_enc[60];
21 u32 key_dec[60];
22};
23
24extern u32 crypto_ft_tab[4][256];
25extern u32 crypto_fl_tab[4][256];
26extern u32 crypto_it_tab[4][256];
27extern u32 crypto_il_tab[4][256];
28
29int crypto_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
30 unsigned int key_len);
31#endif
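
For illustration only, not part of this patch: a cipher implementation can now share these constants and the generic key expansion instead of carrying private copies. The helper name below is hypothetical.

/* Hypothetical setkey helper: validate against the shared key sizes, then let
 * the generic code expand the key into struct crypto_aes_ctx. */
static int example_aes_setkey(struct crypto_tfm *tfm, const u8 *in_key,
			      unsigned int key_len)
{
	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* Fills crypto_aes_ctx::key_enc and ::key_dec from in_key. */
	return crypto_aes_set_key(tfm, in_key, key_len);
}
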
diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h
index b9b05d399d2b..60d06e784be3 100644
--- a/include/crypto/algapi.h
+++ b/include/crypto/algapi.h
@@ -111,8 +111,15 @@ void crypto_drop_spawn(struct crypto_spawn *spawn);
111struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type, 111struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
112 u32 mask); 112 u32 mask);
113 113
114static inline void crypto_set_spawn(struct crypto_spawn *spawn,
115 struct crypto_instance *inst)
116{
117 spawn->inst = inst;
118}
119
114struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb); 120struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
115int crypto_check_attr_type(struct rtattr **tb, u32 type); 121int crypto_check_attr_type(struct rtattr **tb, u32 type);
122const char *crypto_attr_alg_name(struct rtattr *rta);
116struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask); 123struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask);
117int crypto_attr_u32(struct rtattr *rta, u32 *num); 124int crypto_attr_u32(struct rtattr *rta, u32 *num);
118struct crypto_instance *crypto_alloc_instance(const char *name, 125struct crypto_instance *crypto_alloc_instance(const char *name,
@@ -124,6 +131,10 @@ int crypto_enqueue_request(struct crypto_queue *queue,
124struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue); 131struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
125int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm); 132int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);
126 133
134/* These functions require the input/output to be aligned as u32. */
135void crypto_inc(u8 *a, unsigned int size);
136void crypto_xor(u8 *dst, const u8 *src, unsigned int size);
137
127int blkcipher_walk_done(struct blkcipher_desc *desc, 138int blkcipher_walk_done(struct blkcipher_desc *desc,
128 struct blkcipher_walk *walk, int err); 139 struct blkcipher_walk *walk, int err);
129int blkcipher_walk_virt(struct blkcipher_desc *desc, 140int blkcipher_walk_virt(struct blkcipher_desc *desc,
@@ -187,20 +198,11 @@ static inline struct crypto_instance *crypto_aead_alg_instance(
187 return crypto_tfm_alg_instance(&aead->base); 198 return crypto_tfm_alg_instance(&aead->base);
188} 199}
189 200
190static inline struct crypto_ablkcipher *crypto_spawn_ablkcipher(
191 struct crypto_spawn *spawn)
192{
193 u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
194 u32 mask = CRYPTO_ALG_TYPE_MASK;
195
196 return __crypto_ablkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
197}
198
199static inline struct crypto_blkcipher *crypto_spawn_blkcipher( 201static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
200 struct crypto_spawn *spawn) 202 struct crypto_spawn *spawn)
201{ 203{
202 u32 type = CRYPTO_ALG_TYPE_BLKCIPHER; 204 u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
203 u32 mask = CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC; 205 u32 mask = CRYPTO_ALG_TYPE_MASK;
204 206
205 return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask)); 207 return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
206} 208}
@@ -303,5 +305,14 @@ static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
303 return crypto_attr_alg(tb[1], type, mask); 305 return crypto_attr_alg(tb[1], type, mask);
304} 306}
305 307
308/*
309 * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
310 * Otherwise returns zero.
311 */
312static inline int crypto_requires_sync(u32 type, u32 mask)
313{
314 return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
315}
316
306#endif /* _CRYPTO_ALGAPI_H */ 317#endif /* _CRYPTO_ALGAPI_H */
307 318
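
Two of the additions above are generic bit-fiddling helpers rather than registration plumbing: crypto_inc() steps a big-endian counter block and crypto_xor() folds one buffer into another in place, both requiring u32-aligned buffers as the new comment notes. A minimal CTR-style sketch of how a mode implementation might pair them (buffer management is illustrative):

#include <crypto/algapi.h>

static void example_ctr_step(u8 *counter, const u8 *keystream, u8 *data,
                             unsigned int bsize)
{
        /* XOR one block of keystream into the data in place ... */
        crypto_xor(data, keystream, bsize);
        /* ... then bump the big-endian counter for the next block. */
        crypto_inc(counter, bsize);
}

crypto_requires_sync(), also added here, is what a template would consult to decide whether the caller's type/mask forces it to pick a synchronous inner algorithm.
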
diff --git a/include/crypto/authenc.h b/include/crypto/authenc.h
new file mode 100644
index 000000000000..e47b044929a8
--- /dev/null
+++ b/include/crypto/authenc.h
@@ -0,0 +1,27 @@
1/*
2 * Authenc: Simple AEAD wrapper for IPsec
3 *
4 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License as published by the Free
8 * Software Foundation; either version 2 of the License, or (at your option)
9 * any later version.
10 *
11 */
12#ifndef _CRYPTO_AUTHENC_H
13#define _CRYPTO_AUTHENC_H
14
15#include <linux/types.h>
16
17enum {
18 CRYPTO_AUTHENC_KEYA_UNSPEC,
19 CRYPTO_AUTHENC_KEYA_PARAM,
20};
21
22struct crypto_authenc_key_param {
23 __be32 enckeylen;
24};
25
26#endif /* _CRYPTO_AUTHENC_H */
27
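
This header only describes a key layout: the authenc() template in this series appears to expect its key as an rtattr of type CRYPTO_AUTHENC_KEYA_PARAM carrying the big-endian encryption-key length, followed by the authentication key and then the encryption key. A hedged sketch of packing such a blob (buffer sizing and error handling are omitted; the function name is illustrative):

#include <crypto/authenc.h>
#include <linux/rtnetlink.h>
#include <linux/string.h>
#include <asm/byteorder.h>

static unsigned int example_pack_authenc_key(u8 *buf,
                                             const u8 *authkey, unsigned int alen,
                                             const u8 *enckey, unsigned int elen)
{
        struct rtattr *rta = (struct rtattr *)buf;
        struct crypto_authenc_key_param *param;
        u8 *p;

        rta->rta_type = CRYPTO_AUTHENC_KEYA_PARAM;
        rta->rta_len = RTA_LENGTH(sizeof(*param));
        param = RTA_DATA(rta);
        param->enckeylen = cpu_to_be32(elen);

        p = buf + RTA_SPACE(sizeof(*param));
        memcpy(p, authkey, alen);               /* authentication key first */
        memcpy(p + alen, enckey, elen);         /* then the encryption key */

        return RTA_SPACE(sizeof(*param)) + alen + elen;
}
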
diff --git a/include/crypto/ctr.h b/include/crypto/ctr.h
new file mode 100644
index 000000000000..4180fc080e3b
--- /dev/null
+++ b/include/crypto/ctr.h
@@ -0,0 +1,20 @@
1/*
2 * CTR: Counter mode
3 *
4 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License as published by the Free
8 * Software Foundation; either version 2 of the License, or (at your option)
9 * any later version.
10 *
11 */
12
13#ifndef _CRYPTO_CTR_H
14#define _CRYPTO_CTR_H
15
16#define CTR_RFC3686_NONCE_SIZE 4
17#define CTR_RFC3686_IV_SIZE 8
18#define CTR_RFC3686_BLOCK_SIZE 16
19
20#endif /* _CRYPTO_CTR_H */
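
These constants encode the RFC 3686 convention: a 4-byte per-key nonce and an 8-byte per-packet IV are concatenated with a 4-byte block counter that starts at 1, giving the 16-byte AES counter block. A small sketch of assembling that block (layout per the RFC, not lifted from the patch):

#include <crypto/ctr.h>
#include <linux/string.h>
#include <linux/types.h>
#include <asm/byteorder.h>

static void example_rfc3686_counter_block(u8 *ctrblk,          /* 16 bytes */
                                          const u8 *nonce,      /* 4 bytes  */
                                          const u8 *iv)         /* 8 bytes  */
{
        memcpy(ctrblk, nonce, CTR_RFC3686_NONCE_SIZE);
        memcpy(ctrblk + CTR_RFC3686_NONCE_SIZE, iv, CTR_RFC3686_IV_SIZE);
        /* RFC 3686 starts the 32-bit block counter at 1 */
        *(__be32 *)(ctrblk + CTR_RFC3686_NONCE_SIZE +
                    CTR_RFC3686_IV_SIZE) = cpu_to_be32(1);
}
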
diff --git a/include/crypto/des.h b/include/crypto/des.h
new file mode 100644
index 000000000000..2971c6304ade
--- /dev/null
+++ b/include/crypto/des.h
@@ -0,0 +1,19 @@
1/*
2 * DES & Triple DES EDE Cipher Algorithms.
3 */
4
5#ifndef __CRYPTO_DES_H
6#define __CRYPTO_DES_H
7
8#define DES_KEY_SIZE 8
9#define DES_EXPKEY_WORDS 32
10#define DES_BLOCK_SIZE 8
11
12#define DES3_EDE_KEY_SIZE (3 * DES_KEY_SIZE)
13#define DES3_EDE_EXPKEY_WORDS (3 * DES_EXPKEY_WORDS)
14#define DES3_EDE_BLOCK_SIZE DES_BLOCK_SIZE
15
16
17extern unsigned long des_ekey(u32 *pe, const u8 *k);
18
19#endif /* __CRYPTO_DES_H */
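
des_ekey() is exported here so other implementations can reuse the generic key expansion together with its weak-key detection; in des_generic.c a zero return appears to signal a weak key. A hedged sketch of a setkey built on it (flag handling is illustrative only):

#include <crypto/des.h>
#include <linux/crypto.h>
#include <linux/errno.h>

static int example_des_setkey(struct crypto_tfm *tfm, const u8 *key)
{
        u32 expkey[DES_EXPKEY_WORDS];

        /* A zero return from des_ekey() is taken here as "weak key". */
        if (!des_ekey(expkey, key) &&
            (crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_WEAK_KEY)) {
                crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_WEAK_KEY);
                return -EINVAL;
        }

        /* expkey[] now holds the 32-word round-key schedule. */
        return 0;
}
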
diff --git a/include/crypto/internal/aead.h b/include/crypto/internal/aead.h
new file mode 100644
index 000000000000..d838c945575a
--- /dev/null
+++ b/include/crypto/internal/aead.h
@@ -0,0 +1,80 @@
1/*
2 * AEAD: Authenticated Encryption with Associated Data
3 *
4 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License as published by the Free
8 * Software Foundation; either version 2 of the License, or (at your option)
9 * any later version.
10 *
11 */
12
13#ifndef _CRYPTO_INTERNAL_AEAD_H
14#define _CRYPTO_INTERNAL_AEAD_H
15
16#include <crypto/aead.h>
17#include <crypto/algapi.h>
18#include <linux/types.h>
19
20struct rtattr;
21
22struct crypto_aead_spawn {
23 struct crypto_spawn base;
24};
25
26extern const struct crypto_type crypto_nivaead_type;
27
28static inline void crypto_set_aead_spawn(
29 struct crypto_aead_spawn *spawn, struct crypto_instance *inst)
30{
31 crypto_set_spawn(&spawn->base, inst);
32}
33
34int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
35 u32 type, u32 mask);
36
37static inline void crypto_drop_aead(struct crypto_aead_spawn *spawn)
38{
39 crypto_drop_spawn(&spawn->base);
40}
41
42static inline struct crypto_alg *crypto_aead_spawn_alg(
43 struct crypto_aead_spawn *spawn)
44{
45 return spawn->base.alg;
46}
47
48static inline struct crypto_aead *crypto_spawn_aead(
49 struct crypto_aead_spawn *spawn)
50{
51 return __crypto_aead_cast(
52 crypto_spawn_tfm(&spawn->base, CRYPTO_ALG_TYPE_AEAD,
53 CRYPTO_ALG_TYPE_MASK));
54}
55
56struct crypto_instance *aead_geniv_alloc(struct crypto_template *tmpl,
57 struct rtattr **tb, u32 type,
58 u32 mask);
59void aead_geniv_free(struct crypto_instance *inst);
60int aead_geniv_init(struct crypto_tfm *tfm);
61void aead_geniv_exit(struct crypto_tfm *tfm);
62
63static inline struct crypto_aead *aead_geniv_base(struct crypto_aead *geniv)
64{
65 return crypto_aead_crt(geniv)->base;
66}
67
68static inline void *aead_givcrypt_reqctx(struct aead_givcrypt_request *req)
69{
70 return aead_request_ctx(&req->areq);
71}
72
73static inline void aead_givcrypt_complete(struct aead_givcrypt_request *req,
74 int err)
75{
76 aead_request_complete(&req->areq, err);
77}
78
79#endif /* _CRYPTO_INTERNAL_AEAD_H */
80
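
The spawn helpers above are aimed at templates that wrap an inner AEAD, such as the IV-generator templates added in this series: the instance-allocation path ties the spawn to the instance and grabs the named algorithm, and the tfm-init path instantiates the inner transform from it. A rough sketch under those assumptions (context layout and error handling are illustrative):

#include <crypto/internal/aead.h>
#include <linux/err.h>

struct example_aead_ctx {
        struct crypto_aead *child;
};

static int example_grab(struct crypto_instance *inst,
                        struct crypto_aead_spawn *spawn,
                        const char *name, u32 type, u32 mask)
{
        crypto_set_aead_spawn(spawn, inst);     /* tie spawn to the instance */
        return crypto_grab_aead(spawn, name, type, mask);
}

static int example_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
        struct crypto_aead_spawn *spawn = crypto_instance_ctx(inst);
        struct example_aead_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_aead *child;

        child = crypto_spawn_aead(spawn);       /* instantiate the inner AEAD */
        if (IS_ERR(child))
                return PTR_ERR(child);

        ctx->child = child;
        return 0;
}
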
diff --git a/include/crypto/internal/skcipher.h b/include/crypto/internal/skcipher.h
new file mode 100644
index 000000000000..2ba42cd7d6aa
--- /dev/null
+++ b/include/crypto/internal/skcipher.h
@@ -0,0 +1,110 @@
1/*
2 * Symmetric key ciphers.
3 *
4 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License as published by the Free
8 * Software Foundation; either version 2 of the License, or (at your option)
9 * any later version.
10 *
11 */
12
13#ifndef _CRYPTO_INTERNAL_SKCIPHER_H
14#define _CRYPTO_INTERNAL_SKCIPHER_H
15
16#include <crypto/algapi.h>
17#include <crypto/skcipher.h>
18#include <linux/types.h>
19
20struct rtattr;
21
22struct crypto_skcipher_spawn {
23 struct crypto_spawn base;
24};
25
26extern const struct crypto_type crypto_givcipher_type;
27
28static inline void crypto_set_skcipher_spawn(
29 struct crypto_skcipher_spawn *spawn, struct crypto_instance *inst)
30{
31 crypto_set_spawn(&spawn->base, inst);
32}
33
34int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
35 u32 type, u32 mask);
36
37static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
38{
39 crypto_drop_spawn(&spawn->base);
40}
41
42static inline struct crypto_alg *crypto_skcipher_spawn_alg(
43 struct crypto_skcipher_spawn *spawn)
44{
45 return spawn->base.alg;
46}
47
48static inline struct crypto_ablkcipher *crypto_spawn_skcipher(
49 struct crypto_skcipher_spawn *spawn)
50{
51 return __crypto_ablkcipher_cast(
52 crypto_spawn_tfm(&spawn->base, crypto_skcipher_type(0),
53 crypto_skcipher_mask(0)));
54}
55
56int skcipher_null_givencrypt(struct skcipher_givcrypt_request *req);
57int skcipher_null_givdecrypt(struct skcipher_givcrypt_request *req);
58const char *crypto_default_geniv(const struct crypto_alg *alg);
59
60struct crypto_instance *skcipher_geniv_alloc(struct crypto_template *tmpl,
61 struct rtattr **tb, u32 type,
62 u32 mask);
63void skcipher_geniv_free(struct crypto_instance *inst);
64int skcipher_geniv_init(struct crypto_tfm *tfm);
65void skcipher_geniv_exit(struct crypto_tfm *tfm);
66
67static inline struct crypto_ablkcipher *skcipher_geniv_cipher(
68 struct crypto_ablkcipher *geniv)
69{
70 return crypto_ablkcipher_crt(geniv)->base;
71}
72
73static inline int skcipher_enqueue_givcrypt(
74 struct crypto_queue *queue, struct skcipher_givcrypt_request *request)
75{
76 return ablkcipher_enqueue_request(queue, &request->creq);
77}
78
79static inline struct skcipher_givcrypt_request *skcipher_dequeue_givcrypt(
80 struct crypto_queue *queue)
81{
82 return container_of(ablkcipher_dequeue_request(queue),
83 struct skcipher_givcrypt_request, creq);
84}
85
86static inline void *skcipher_givcrypt_reqctx(
87 struct skcipher_givcrypt_request *req)
88{
89 return ablkcipher_request_ctx(&req->creq);
90}
91
92static inline void ablkcipher_request_complete(struct ablkcipher_request *req,
93 int err)
94{
95 req->base.complete(&req->base, err);
96}
97
98static inline void skcipher_givcrypt_complete(
99 struct skcipher_givcrypt_request *req, int err)
100{
101 ablkcipher_request_complete(&req->creq, err);
102}
103
104static inline u32 ablkcipher_request_flags(struct ablkcipher_request *req)
105{
106 return req->base.flags;
107}
108
109#endif /* _CRYPTO_INTERNAL_SKCIPHER_H */
110
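
Besides the spawn plumbing, this header adds queue helpers so a driver or template can defer IV-generation requests and complete them later. A hedged sketch of that pairing (it assumes the caller already knows the queue is non-empty when draining it; real code also handles backlog and locking):

#include <crypto/internal/skcipher.h>

static int example_queue_givcrypt(struct crypto_queue *queue,
                                  struct skcipher_givcrypt_request *req)
{
        /* Defers the request; the return value follows
         * crypto_enqueue_request() (-EINPROGRESS or -EBUSY). */
        return skcipher_enqueue_givcrypt(queue, req);
}

static void example_process_one(struct crypto_queue *queue)
{
        struct skcipher_givcrypt_request *req;

        req = skcipher_dequeue_givcrypt(queue); /* caller ensured non-empty */

        /* ... derive req->giv from req->seq and run the inner cipher ... */

        skcipher_givcrypt_complete(req, 0);
}
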
diff --git a/crypto/scatterwalk.h b/include/crypto/scatterwalk.h
index 87ed681cceba..224658b8d806 100644
--- a/crypto/scatterwalk.h
+++ b/include/crypto/scatterwalk.h
@@ -1,9 +1,10 @@
1/* 1/*
2 * Cryptographic API. 2 * Cryptographic scatter and gather helpers.
3 * 3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> 4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Adam J. Richter <adam@yggdrasil.com> 5 * Copyright (c) 2002 Adam J. Richter <adam@yggdrasil.com>
6 * Copyright (c) 2004 Jean-Luc Cooke <jlcooke@certainkey.com> 6 * Copyright (c) 2004 Jean-Luc Cooke <jlcooke@certainkey.com>
7 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
7 * 8 *
8 * This program is free software; you can redistribute it and/or modify it 9 * This program is free software; you can redistribute it and/or modify it
9 * under the terms of the GNU General Public License as published by the Free 10 * under the terms of the GNU General Public License as published by the Free
@@ -15,14 +16,52 @@
15#ifndef _CRYPTO_SCATTERWALK_H 16#ifndef _CRYPTO_SCATTERWALK_H
16#define _CRYPTO_SCATTERWALK_H 17#define _CRYPTO_SCATTERWALK_H
17 18
19#include <asm/kmap_types.h>
20#include <crypto/algapi.h>
21#include <linux/hardirq.h>
22#include <linux/highmem.h>
23#include <linux/kernel.h>
18#include <linux/mm.h> 24#include <linux/mm.h>
19#include <linux/scatterlist.h> 25#include <linux/scatterlist.h>
26#include <linux/sched.h>
20 27
21#include "internal.h" 28static inline enum km_type crypto_kmap_type(int out)
29{
30 enum km_type type;
31
32 if (in_softirq())
33 type = out * (KM_SOFTIRQ1 - KM_SOFTIRQ0) + KM_SOFTIRQ0;
34 else
35 type = out * (KM_USER1 - KM_USER0) + KM_USER0;
36
37 return type;
38}
39
40static inline void *crypto_kmap(struct page *page, int out)
41{
42 return kmap_atomic(page, crypto_kmap_type(out));
43}
44
45static inline void crypto_kunmap(void *vaddr, int out)
46{
47 kunmap_atomic(vaddr, crypto_kmap_type(out));
48}
49
50static inline void crypto_yield(u32 flags)
51{
52 if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
53 cond_resched();
54}
55
56static inline void scatterwalk_sg_chain(struct scatterlist *sg1, int num,
57 struct scatterlist *sg2)
58{
59 sg_set_page(&sg1[num - 1], (void *)sg2, 0, 0);
60}
22 61
23static inline struct scatterlist *scatterwalk_sg_next(struct scatterlist *sg) 62static inline struct scatterlist *scatterwalk_sg_next(struct scatterlist *sg)
24{ 63{
25 return (++sg)->length ? sg : (void *) sg_page(sg); 64 return (++sg)->length ? sg : (void *)sg_page(sg);
26} 65}
27 66
28static inline unsigned long scatterwalk_samebuf(struct scatter_walk *walk_in, 67static inline unsigned long scatterwalk_samebuf(struct scatter_walk *walk_in,
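
Worth noting in the moved header: crypto_kmap()/crypto_kunmap() pick a softirq-safe atomic kmap slot for callers walking scatterlists, and crypto_yield() only turns into cond_resched() when the request allows sleeping. A small hedged sketch of the usual bracketing (page/offset handling is illustrative):

#include <crypto/scatterwalk.h>
#include <linux/string.h>

static void example_copy_from_page(struct page *page, unsigned int offset,
                                   u8 *dst, unsigned int n, u32 req_flags)
{
        u8 *vaddr = crypto_kmap(page, 0);       /* 0: this is the input side */

        memcpy(dst, vaddr + offset, n);
        crypto_kunmap(vaddr, 0);

        crypto_yield(req_flags);        /* cond_resched() if MAY_SLEEP is set */
}
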
diff --git a/include/crypto/sha.h b/include/crypto/sha.h
index 0686e1f7a24b..c0ccc2b1a2d8 100644
--- a/include/crypto/sha.h
+++ b/include/crypto/sha.h
@@ -8,6 +8,9 @@
8#define SHA1_DIGEST_SIZE 20 8#define SHA1_DIGEST_SIZE 20
9#define SHA1_BLOCK_SIZE 64 9#define SHA1_BLOCK_SIZE 64
10 10
11#define SHA224_DIGEST_SIZE 28
12#define SHA224_BLOCK_SIZE 64
13
11#define SHA256_DIGEST_SIZE 32 14#define SHA256_DIGEST_SIZE 32
12#define SHA256_BLOCK_SIZE 64 15#define SHA256_BLOCK_SIZE 64
13 16
@@ -23,6 +26,15 @@
23#define SHA1_H3 0x10325476UL 26#define SHA1_H3 0x10325476UL
24#define SHA1_H4 0xc3d2e1f0UL 27#define SHA1_H4 0xc3d2e1f0UL
25 28
29#define SHA224_H0 0xc1059ed8UL
30#define SHA224_H1 0x367cd507UL
31#define SHA224_H2 0x3070dd17UL
32#define SHA224_H3 0xf70e5939UL
33#define SHA224_H4 0xffc00b31UL
34#define SHA224_H5 0x68581511UL
35#define SHA224_H6 0x64f98fa7UL
36#define SHA224_H7 0xbefa4fa4UL
37
26#define SHA256_H0 0x6a09e667UL 38#define SHA256_H0 0x6a09e667UL
27#define SHA256_H1 0xbb67ae85UL 39#define SHA256_H1 0xbb67ae85UL
28#define SHA256_H2 0x3c6ef372UL 40#define SHA256_H2 0x3c6ef372UL
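
The SHA-224 additions are the initial hash words from FIPS 180-2; a digest implementation's init step simply loads them and otherwise shares the SHA-256 machinery (sha256_generic.c in this series gains the equivalent). A sketch with a stand-in state structure:

#include <crypto/sha.h>
#include <linux/types.h>

struct example_sha256_state {
        u32 state[SHA256_DIGEST_SIZE / 4];
        u64 count;
        u8 buf[SHA256_BLOCK_SIZE];
};

static void example_sha224_init(struct example_sha256_state *sctx)
{
        sctx->state[0] = SHA224_H0;
        sctx->state[1] = SHA224_H1;
        sctx->state[2] = SHA224_H2;
        sctx->state[3] = SHA224_H3;
        sctx->state[4] = SHA224_H4;
        sctx->state[5] = SHA224_H5;
        sctx->state[6] = SHA224_H6;
        sctx->state[7] = SHA224_H7;
        sctx->count = 0;
}
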
diff --git a/include/crypto/skcipher.h b/include/crypto/skcipher.h
new file mode 100644
index 000000000000..25fd6126522d
--- /dev/null
+++ b/include/crypto/skcipher.h
@@ -0,0 +1,110 @@
1/*
2 * Symmetric key ciphers.
3 *
4 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License as published by the Free
8 * Software Foundation; either version 2 of the License, or (at your option)
9 * any later version.
10 *
11 */
12
13#ifndef _CRYPTO_SKCIPHER_H
14#define _CRYPTO_SKCIPHER_H
15
16#include <linux/crypto.h>
17#include <linux/kernel.h>
18#include <linux/slab.h>
19
20/**
21 * struct skcipher_givcrypt_request - Crypto request with IV generation
22 * @seq: Sequence number for IV generation
23 * @giv: Space for generated IV
24 * @creq: The crypto request itself
25 */
26struct skcipher_givcrypt_request {
27 u64 seq;
28 u8 *giv;
29
30 struct ablkcipher_request creq;
31};
32
33static inline struct crypto_ablkcipher *skcipher_givcrypt_reqtfm(
34 struct skcipher_givcrypt_request *req)
35{
36 return crypto_ablkcipher_reqtfm(&req->creq);
37}
38
39static inline int crypto_skcipher_givencrypt(
40 struct skcipher_givcrypt_request *req)
41{
42 struct ablkcipher_tfm *crt =
43 crypto_ablkcipher_crt(skcipher_givcrypt_reqtfm(req));
44 return crt->givencrypt(req);
45};
46
47static inline int crypto_skcipher_givdecrypt(
48 struct skcipher_givcrypt_request *req)
49{
50 struct ablkcipher_tfm *crt =
51 crypto_ablkcipher_crt(skcipher_givcrypt_reqtfm(req));
52 return crt->givdecrypt(req);
53};
54
55static inline void skcipher_givcrypt_set_tfm(
56 struct skcipher_givcrypt_request *req, struct crypto_ablkcipher *tfm)
57{
58 req->creq.base.tfm = crypto_ablkcipher_tfm(tfm);
59}
60
61static inline struct skcipher_givcrypt_request *skcipher_givcrypt_cast(
62 struct crypto_async_request *req)
63{
64 return container_of(ablkcipher_request_cast(req),
65 struct skcipher_givcrypt_request, creq);
66}
67
68static inline struct skcipher_givcrypt_request *skcipher_givcrypt_alloc(
69 struct crypto_ablkcipher *tfm, gfp_t gfp)
70{
71 struct skcipher_givcrypt_request *req;
72
73 req = kmalloc(sizeof(struct skcipher_givcrypt_request) +
74 crypto_ablkcipher_reqsize(tfm), gfp);
75
76 if (likely(req))
77 skcipher_givcrypt_set_tfm(req, tfm);
78
79 return req;
80}
81
82static inline void skcipher_givcrypt_free(struct skcipher_givcrypt_request *req)
83{
84 kfree(req);
85}
86
87static inline void skcipher_givcrypt_set_callback(
88 struct skcipher_givcrypt_request *req, u32 flags,
89 crypto_completion_t complete, void *data)
90{
91 ablkcipher_request_set_callback(&req->creq, flags, complete, data);
92}
93
94static inline void skcipher_givcrypt_set_crypt(
95 struct skcipher_givcrypt_request *req,
96 struct scatterlist *src, struct scatterlist *dst,
97 unsigned int nbytes, void *iv)
98{
99 ablkcipher_request_set_crypt(&req->creq, src, dst, nbytes, iv);
100}
101
102static inline void skcipher_givcrypt_set_giv(
103 struct skcipher_givcrypt_request *req, u8 *giv, u64 seq)
104{
105 req->giv = giv;
106 req->seq = seq;
107}
108
109#endif /* _CRYPTO_SKCIPHER_H */
110
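
Putting the request helpers above together, a caller issues an IV-generating encryption roughly as follows; the completion callback, scatterlists and sequence number are placeholders supplied by the caller, and the generated IV lands in the buffer handed to skcipher_givcrypt_set_giv():

#include <crypto/skcipher.h>
#include <linux/scatterlist.h>

static int example_givencrypt(struct crypto_ablkcipher *geniv,
                              struct scatterlist *src, struct scatterlist *dst,
                              unsigned int nbytes, u8 *iv, u64 seq,
                              crypto_completion_t done, void *data)
{
        struct skcipher_givcrypt_request *req;
        int err;

        req = skcipher_givcrypt_alloc(geniv, GFP_KERNEL);
        if (!req)
                return -ENOMEM;

        skcipher_givcrypt_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
                                       done, data);
        skcipher_givcrypt_set_crypt(req, src, dst, nbytes, iv);
        skcipher_givcrypt_set_giv(req, iv, seq);

        err = crypto_skcipher_givencrypt(req);
        if (err != -EINPROGRESS)
                skcipher_givcrypt_free(req);

        return err;
}
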
diff --git a/include/linux/crypto.h b/include/linux/crypto.h
index f3110ebe894a..5e02d1b46370 100644
--- a/include/linux/crypto.h
+++ b/include/linux/crypto.h
@@ -33,10 +33,13 @@
33#define CRYPTO_ALG_TYPE_DIGEST 0x00000002 33#define CRYPTO_ALG_TYPE_DIGEST 0x00000002
34#define CRYPTO_ALG_TYPE_HASH 0x00000003 34#define CRYPTO_ALG_TYPE_HASH 0x00000003
35#define CRYPTO_ALG_TYPE_BLKCIPHER 0x00000004 35#define CRYPTO_ALG_TYPE_BLKCIPHER 0x00000004
36#define CRYPTO_ALG_TYPE_COMPRESS 0x00000005 36#define CRYPTO_ALG_TYPE_ABLKCIPHER 0x00000005
37#define CRYPTO_ALG_TYPE_AEAD 0x00000006 37#define CRYPTO_ALG_TYPE_GIVCIPHER 0x00000006
38#define CRYPTO_ALG_TYPE_COMPRESS 0x00000008
39#define CRYPTO_ALG_TYPE_AEAD 0x00000009
38 40
39#define CRYPTO_ALG_TYPE_HASH_MASK 0x0000000e 41#define CRYPTO_ALG_TYPE_HASH_MASK 0x0000000e
42#define CRYPTO_ALG_TYPE_BLKCIPHER_MASK 0x0000000c
40 43
41#define CRYPTO_ALG_LARVAL 0x00000010 44#define CRYPTO_ALG_LARVAL 0x00000010
42#define CRYPTO_ALG_DEAD 0x00000020 45#define CRYPTO_ALG_DEAD 0x00000020
@@ -50,6 +53,12 @@
50#define CRYPTO_ALG_NEED_FALLBACK 0x00000100 53#define CRYPTO_ALG_NEED_FALLBACK 0x00000100
51 54
52/* 55/*
56 * This bit is set for symmetric key ciphers that have already been wrapped
57 * with a generic IV generator to prevent them from being wrapped again.
58 */
59#define CRYPTO_ALG_GENIV 0x00000200
60
61/*
53 * Transform masks and values (for crt_flags). 62 * Transform masks and values (for crt_flags).
54 */ 63 */
55#define CRYPTO_TFM_REQ_MASK 0x000fff00 64#define CRYPTO_TFM_REQ_MASK 0x000fff00
@@ -81,13 +90,11 @@
81#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN 90#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN
82#elif defined(ARCH_SLAB_MINALIGN) 91#elif defined(ARCH_SLAB_MINALIGN)
83#define CRYPTO_MINALIGN ARCH_SLAB_MINALIGN 92#define CRYPTO_MINALIGN ARCH_SLAB_MINALIGN
93#else
94#define CRYPTO_MINALIGN __alignof__(unsigned long long)
84#endif 95#endif
85 96
86#ifdef CRYPTO_MINALIGN
87#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN))) 97#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
88#else
89#define CRYPTO_MINALIGN_ATTR
90#endif
91 98
92struct scatterlist; 99struct scatterlist;
93struct crypto_ablkcipher; 100struct crypto_ablkcipher;
@@ -97,6 +104,8 @@ struct crypto_blkcipher;
97struct crypto_hash; 104struct crypto_hash;
98struct crypto_tfm; 105struct crypto_tfm;
99struct crypto_type; 106struct crypto_type;
107struct aead_givcrypt_request;
108struct skcipher_givcrypt_request;
100 109
101typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err); 110typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);
102 111
@@ -176,6 +185,10 @@ struct ablkcipher_alg {
176 unsigned int keylen); 185 unsigned int keylen);
177 int (*encrypt)(struct ablkcipher_request *req); 186 int (*encrypt)(struct ablkcipher_request *req);
178 int (*decrypt)(struct ablkcipher_request *req); 187 int (*decrypt)(struct ablkcipher_request *req);
188 int (*givencrypt)(struct skcipher_givcrypt_request *req);
189 int (*givdecrypt)(struct skcipher_givcrypt_request *req);
190
191 const char *geniv;
179 192
180 unsigned int min_keysize; 193 unsigned int min_keysize;
181 unsigned int max_keysize; 194 unsigned int max_keysize;
@@ -185,11 +198,16 @@ struct ablkcipher_alg {
185struct aead_alg { 198struct aead_alg {
186 int (*setkey)(struct crypto_aead *tfm, const u8 *key, 199 int (*setkey)(struct crypto_aead *tfm, const u8 *key,
187 unsigned int keylen); 200 unsigned int keylen);
201 int (*setauthsize)(struct crypto_aead *tfm, unsigned int authsize);
188 int (*encrypt)(struct aead_request *req); 202 int (*encrypt)(struct aead_request *req);
189 int (*decrypt)(struct aead_request *req); 203 int (*decrypt)(struct aead_request *req);
204 int (*givencrypt)(struct aead_givcrypt_request *req);
205 int (*givdecrypt)(struct aead_givcrypt_request *req);
206
207 const char *geniv;
190 208
191 unsigned int ivsize; 209 unsigned int ivsize;
192 unsigned int authsize; 210 unsigned int maxauthsize;
193}; 211};
194 212
195struct blkcipher_alg { 213struct blkcipher_alg {
@@ -202,6 +220,8 @@ struct blkcipher_alg {
202 struct scatterlist *dst, struct scatterlist *src, 220 struct scatterlist *dst, struct scatterlist *src,
203 unsigned int nbytes); 221 unsigned int nbytes);
204 222
223 const char *geniv;
224
205 unsigned int min_keysize; 225 unsigned int min_keysize;
206 unsigned int max_keysize; 226 unsigned int max_keysize;
207 unsigned int ivsize; 227 unsigned int ivsize;
@@ -317,6 +337,11 @@ struct ablkcipher_tfm {
317 unsigned int keylen); 337 unsigned int keylen);
318 int (*encrypt)(struct ablkcipher_request *req); 338 int (*encrypt)(struct ablkcipher_request *req);
319 int (*decrypt)(struct ablkcipher_request *req); 339 int (*decrypt)(struct ablkcipher_request *req);
340 int (*givencrypt)(struct skcipher_givcrypt_request *req);
341 int (*givdecrypt)(struct skcipher_givcrypt_request *req);
342
343 struct crypto_ablkcipher *base;
344
320 unsigned int ivsize; 345 unsigned int ivsize;
321 unsigned int reqsize; 346 unsigned int reqsize;
322}; 347};
@@ -326,6 +351,11 @@ struct aead_tfm {
326 unsigned int keylen); 351 unsigned int keylen);
327 int (*encrypt)(struct aead_request *req); 352 int (*encrypt)(struct aead_request *req);
328 int (*decrypt)(struct aead_request *req); 353 int (*decrypt)(struct aead_request *req);
354 int (*givencrypt)(struct aead_givcrypt_request *req);
355 int (*givdecrypt)(struct aead_givcrypt_request *req);
356
357 struct crypto_aead *base;
358
329 unsigned int ivsize; 359 unsigned int ivsize;
330 unsigned int authsize; 360 unsigned int authsize;
331 unsigned int reqsize; 361 unsigned int reqsize;
@@ -525,17 +555,23 @@ static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast(
525 return (struct crypto_ablkcipher *)tfm; 555 return (struct crypto_ablkcipher *)tfm;
526} 556}
527 557
528static inline struct crypto_ablkcipher *crypto_alloc_ablkcipher( 558static inline u32 crypto_skcipher_type(u32 type)
529 const char *alg_name, u32 type, u32 mask)
530{ 559{
531 type &= ~CRYPTO_ALG_TYPE_MASK; 560 type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
532 type |= CRYPTO_ALG_TYPE_BLKCIPHER; 561 type |= CRYPTO_ALG_TYPE_BLKCIPHER;
533 mask |= CRYPTO_ALG_TYPE_MASK; 562 return type;
563}
534 564
535 return __crypto_ablkcipher_cast( 565static inline u32 crypto_skcipher_mask(u32 mask)
536 crypto_alloc_base(alg_name, type, mask)); 566{
567 mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
568 mask |= CRYPTO_ALG_TYPE_BLKCIPHER_MASK;
569 return mask;
537} 570}
538 571
572struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name,
573 u32 type, u32 mask);
574
539static inline struct crypto_tfm *crypto_ablkcipher_tfm( 575static inline struct crypto_tfm *crypto_ablkcipher_tfm(
540 struct crypto_ablkcipher *tfm) 576 struct crypto_ablkcipher *tfm)
541{ 577{
@@ -550,11 +586,8 @@ static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm)
550static inline int crypto_has_ablkcipher(const char *alg_name, u32 type, 586static inline int crypto_has_ablkcipher(const char *alg_name, u32 type,
551 u32 mask) 587 u32 mask)
552{ 588{
553 type &= ~CRYPTO_ALG_TYPE_MASK; 589 return crypto_has_alg(alg_name, crypto_skcipher_type(type),
554 type |= CRYPTO_ALG_TYPE_BLKCIPHER; 590 crypto_skcipher_mask(mask));
555 mask |= CRYPTO_ALG_TYPE_MASK;
556
557 return crypto_has_alg(alg_name, type, mask);
558} 591}
559 592
560static inline struct ablkcipher_tfm *crypto_ablkcipher_crt( 593static inline struct ablkcipher_tfm *crypto_ablkcipher_crt(
@@ -601,7 +634,9 @@ static inline void crypto_ablkcipher_clear_flags(struct crypto_ablkcipher *tfm,
601static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm, 634static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
602 const u8 *key, unsigned int keylen) 635 const u8 *key, unsigned int keylen)
603{ 636{
604 return crypto_ablkcipher_crt(tfm)->setkey(tfm, key, keylen); 637 struct ablkcipher_tfm *crt = crypto_ablkcipher_crt(tfm);
638
639 return crt->setkey(crt->base, key, keylen);
605} 640}
606 641
607static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm( 642static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm(
@@ -633,7 +668,7 @@ static inline unsigned int crypto_ablkcipher_reqsize(
633static inline void ablkcipher_request_set_tfm( 668static inline void ablkcipher_request_set_tfm(
634 struct ablkcipher_request *req, struct crypto_ablkcipher *tfm) 669 struct ablkcipher_request *req, struct crypto_ablkcipher *tfm)
635{ 670{
636 req->base.tfm = crypto_ablkcipher_tfm(tfm); 671 req->base.tfm = crypto_ablkcipher_tfm(crypto_ablkcipher_crt(tfm)->base);
637} 672}
638 673
639static inline struct ablkcipher_request *ablkcipher_request_cast( 674static inline struct ablkcipher_request *ablkcipher_request_cast(
@@ -686,15 +721,7 @@ static inline struct crypto_aead *__crypto_aead_cast(struct crypto_tfm *tfm)
686 return (struct crypto_aead *)tfm; 721 return (struct crypto_aead *)tfm;
687} 722}
688 723
689static inline struct crypto_aead *crypto_alloc_aead(const char *alg_name, 724struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask);
690 u32 type, u32 mask)
691{
692 type &= ~CRYPTO_ALG_TYPE_MASK;
693 type |= CRYPTO_ALG_TYPE_AEAD;
694 mask |= CRYPTO_ALG_TYPE_MASK;
695
696 return __crypto_aead_cast(crypto_alloc_base(alg_name, type, mask));
697}
698 725
699static inline struct crypto_tfm *crypto_aead_tfm(struct crypto_aead *tfm) 726static inline struct crypto_tfm *crypto_aead_tfm(struct crypto_aead *tfm)
700{ 727{
@@ -749,9 +776,13 @@ static inline void crypto_aead_clear_flags(struct crypto_aead *tfm, u32 flags)
749static inline int crypto_aead_setkey(struct crypto_aead *tfm, const u8 *key, 776static inline int crypto_aead_setkey(struct crypto_aead *tfm, const u8 *key,
750 unsigned int keylen) 777 unsigned int keylen)
751{ 778{
752 return crypto_aead_crt(tfm)->setkey(tfm, key, keylen); 779 struct aead_tfm *crt = crypto_aead_crt(tfm);
780
781 return crt->setkey(crt->base, key, keylen);
753} 782}
754 783
784int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize);
785
755static inline struct crypto_aead *crypto_aead_reqtfm(struct aead_request *req) 786static inline struct crypto_aead *crypto_aead_reqtfm(struct aead_request *req)
756{ 787{
757 return __crypto_aead_cast(req->base.tfm); 788 return __crypto_aead_cast(req->base.tfm);
@@ -775,7 +806,7 @@ static inline unsigned int crypto_aead_reqsize(struct crypto_aead *tfm)
775static inline void aead_request_set_tfm(struct aead_request *req, 806static inline void aead_request_set_tfm(struct aead_request *req,
776 struct crypto_aead *tfm) 807 struct crypto_aead *tfm)
777{ 808{
778 req->base.tfm = crypto_aead_tfm(tfm); 809 req->base.tfm = crypto_aead_tfm(crypto_aead_crt(tfm)->base);
779} 810}
780 811
781static inline struct aead_request *aead_request_alloc(struct crypto_aead *tfm, 812static inline struct aead_request *aead_request_alloc(struct crypto_aead *tfm,
@@ -841,9 +872,9 @@ static inline struct crypto_blkcipher *crypto_blkcipher_cast(
841static inline struct crypto_blkcipher *crypto_alloc_blkcipher( 872static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
842 const char *alg_name, u32 type, u32 mask) 873 const char *alg_name, u32 type, u32 mask)
843{ 874{
844 type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC); 875 type &= ~CRYPTO_ALG_TYPE_MASK;
845 type |= CRYPTO_ALG_TYPE_BLKCIPHER; 876 type |= CRYPTO_ALG_TYPE_BLKCIPHER;
846 mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC; 877 mask |= CRYPTO_ALG_TYPE_MASK;
847 878
848 return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask)); 879 return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
849} 880}
@@ -861,9 +892,9 @@ static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
861 892
862static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask) 893static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask)
863{ 894{
864 type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC); 895 type &= ~CRYPTO_ALG_TYPE_MASK;
865 type |= CRYPTO_ALG_TYPE_BLKCIPHER; 896 type |= CRYPTO_ALG_TYPE_BLKCIPHER;
866 mask |= CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC; 897 mask |= CRYPTO_ALG_TYPE_MASK;
867 898
868 return crypto_has_alg(alg_name, type, mask); 899 return crypto_has_alg(alg_name, type, mask);
869} 900}
@@ -1081,6 +1112,7 @@ static inline struct crypto_hash *crypto_alloc_hash(const char *alg_name,
1081 u32 type, u32 mask) 1112 u32 type, u32 mask)
1082{ 1113{
1083 type &= ~CRYPTO_ALG_TYPE_MASK; 1114 type &= ~CRYPTO_ALG_TYPE_MASK;
1115 mask &= ~CRYPTO_ALG_TYPE_MASK;
1084 type |= CRYPTO_ALG_TYPE_HASH; 1116 type |= CRYPTO_ALG_TYPE_HASH;
1085 mask |= CRYPTO_ALG_TYPE_HASH_MASK; 1117 mask |= CRYPTO_ALG_TYPE_HASH_MASK;
1086 1118
@@ -1100,6 +1132,7 @@ static inline void crypto_free_hash(struct crypto_hash *tfm)
1100static inline int crypto_has_hash(const char *alg_name, u32 type, u32 mask) 1132static inline int crypto_has_hash(const char *alg_name, u32 type, u32 mask)
1101{ 1133{
1102 type &= ~CRYPTO_ALG_TYPE_MASK; 1134 type &= ~CRYPTO_ALG_TYPE_MASK;
1135 mask &= ~CRYPTO_ALG_TYPE_MASK;
1103 type |= CRYPTO_ALG_TYPE_HASH; 1136 type |= CRYPTO_ALG_TYPE_HASH;
1104 mask |= CRYPTO_ALG_TYPE_HASH_MASK; 1137 mask |= CRYPTO_ALG_TYPE_HASH_MASK;
1105 1138
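
With crypto_alloc_aead() now out of line and crypto_aead_setauthsize() exported, an AEAD user sets up a transform roughly as below; the algorithm name, the 96-bit tag length and the assumption that the key is already packed in the authenc rtattr layout (see include/crypto/authenc.h above) are illustrative choices, not mandated by this patch:

#include <linux/crypto.h>
#include <linux/err.h>

static struct crypto_aead *example_alloc_aead(const u8 *key, unsigned int keylen)
{
        struct crypto_aead *aead;
        int err;

        aead = crypto_alloc_aead("authenc(hmac(sha1),cbc(aes))", 0, 0);
        if (IS_ERR(aead))
                return aead;

        err = crypto_aead_setkey(aead, key, keylen);
        if (!err)
                err = crypto_aead_setauthsize(aead, 12);        /* 96-bit ICV */

        if (err) {
                crypto_free_aead(aead);
                return ERR_PTR(err);
        }

        return aead;
}
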
diff --git a/include/linux/hw_random.h b/include/linux/hw_random.h
index 21ea7610e177..85d11916e9ea 100644
--- a/include/linux/hw_random.h
+++ b/include/linux/hw_random.h
@@ -33,7 +33,7 @@ struct hwrng {
33 const char *name; 33 const char *name;
34 int (*init)(struct hwrng *rng); 34 int (*init)(struct hwrng *rng);
35 void (*cleanup)(struct hwrng *rng); 35 void (*cleanup)(struct hwrng *rng);
36 int (*data_present)(struct hwrng *rng); 36 int (*data_present)(struct hwrng *rng, int wait);
37 int (*data_read)(struct hwrng *rng, u32 *data); 37 int (*data_read)(struct hwrng *rng, u32 *data);
38 unsigned long priv; 38 unsigned long priv;
39 39
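
The extra "wait" argument lets the core ask a driver to poll briefly for fresh entropy instead of bailing out immediately. A hedged sketch of what a converted ->data_present() hook looks like; example_rng_ready() and the retry/delay figures are hypothetical placeholders, loosely modelled on the drivers touched elsewhere in this patch:

#include <linux/hw_random.h>
#include <linux/delay.h>

/* Hypothetical helper: returns non-zero when the RNG FIFO has data. */
static int example_rng_ready(struct hwrng *rng);

static int example_rng_data_present(struct hwrng *rng, int wait)
{
        int data = 0;
        int i;

        for (i = 0; i < 20; i++) {
                data = example_rng_ready(rng);
                if (data || !wait)
                        break;
                udelay(10);     /* give the hardware time to gather entropy */
        }

        return data;
}
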