author    Jeff Garzik <jeff@garzik.org>  2006-09-22 20:10:23 -0400
committer Jeff Garzik <jeff@garzik.org>  2006-09-22 20:10:23 -0400
commit    28eb177dfa5982d132edceed891cb3885df258bb (patch)
tree      5f8fdc37ad1d8d0793e9c47da7d908b97c814ffb /crypto
parent    fd8ae94eea9bb4269d6dff1b47b9dc741bd70d0b (diff)
parent    db392219c5f572610645696e3672f6ea38783a65 (diff)
Merge branch 'master' into upstream
Conflicts:

        net/ieee80211/ieee80211_crypt_tkip.c
        net/ieee80211/ieee80211_crypt_wep.c
Diffstat (limited to 'crypto')
-rw-r--r--  crypto/Kconfig             154
-rw-r--r--  crypto/Makefile             16
-rw-r--r--  crypto/aes.c                 5
-rw-r--r--  crypto/algapi.c            486
-rw-r--r--  crypto/anubis.c              3
-rw-r--r--  crypto/api.c               428
-rw-r--r--  crypto/arc4.c                2
-rw-r--r--  crypto/blkcipher.c         405
-rw-r--r--  crypto/blowfish.c            3
-rw-r--r--  crypto/cast5.c               8
-rw-r--r--  crypto/cast6.c               5
-rw-r--r--  crypto/cbc.c               344
-rw-r--r--  crypto/cipher.c            117
-rw-r--r--  crypto/crc32c.c             30
-rw-r--r--  crypto/crypto_null.c         2
-rw-r--r--  crypto/cryptomgr.c         156
-rw-r--r--  crypto/des.c                 6
-rw-r--r--  crypto/digest.c            155
-rw-r--r--  crypto/ecb.c               181
-rw-r--r--  crypto/hash.c               61
-rw-r--r--  crypto/hmac.c              278
-rw-r--r--  crypto/internal.h          106
-rw-r--r--  crypto/khazad.c              8
-rw-r--r--  crypto/michael_mic.c         5
-rw-r--r--  crypto/proc.c               13
-rw-r--r--  crypto/scatterwalk.c        89
-rw-r--r--  crypto/scatterwalk.h        52
-rw-r--r--  crypto/serpent.c            19
-rw-r--r--  crypto/sha1.c                3
-rw-r--r--  crypto/sha256.c              3
-rw-r--r--  crypto/tcrypt.c            901
-rw-r--r--  crypto/tcrypt.h            202
-rw-r--r--  crypto/tea.c                16
-rw-r--r--  crypto/twofish.c           700
-rw-r--r--  crypto/twofish_common.c    744
35 files changed, 4103 insertions(+), 1603 deletions(-)
diff --git a/crypto/Kconfig b/crypto/Kconfig
index ba133d557045..1e2f39c21180 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -9,47 +9,71 @@ config CRYPTO
9 help 9 help
10 This option provides the core Cryptographic API. 10 This option provides the core Cryptographic API.
11 11
12if CRYPTO
13
14config CRYPTO_ALGAPI
15 tristate
16 help
17 This option provides the API for cryptographic algorithms.
18
19config CRYPTO_BLKCIPHER
20 tristate
21 select CRYPTO_ALGAPI
22
23config CRYPTO_HASH
24 tristate
25 select CRYPTO_ALGAPI
26
27config CRYPTO_MANAGER
28 tristate "Cryptographic algorithm manager"
29 select CRYPTO_ALGAPI
30 default m
31 help
32 Create default cryptographic template instantiations such as
33 cbc(aes).
34
12config CRYPTO_HMAC 35config CRYPTO_HMAC
13 bool "HMAC support" 36 tristate "HMAC support"
14 depends on CRYPTO 37 select CRYPTO_HASH
15 help 38 help
16 HMAC: Keyed-Hashing for Message Authentication (RFC2104). 39 HMAC: Keyed-Hashing for Message Authentication (RFC2104).
17 This is required for IPSec. 40 This is required for IPSec.
18 41
19config CRYPTO_NULL 42config CRYPTO_NULL
20 tristate "Null algorithms" 43 tristate "Null algorithms"
21 depends on CRYPTO 44 select CRYPTO_ALGAPI
22 help 45 help
23 These are 'Null' algorithms, used by IPsec, which do nothing. 46 These are 'Null' algorithms, used by IPsec, which do nothing.
24 47
25config CRYPTO_MD4 48config CRYPTO_MD4
26 tristate "MD4 digest algorithm" 49 tristate "MD4 digest algorithm"
27 depends on CRYPTO 50 select CRYPTO_ALGAPI
28 help 51 help
29 MD4 message digest algorithm (RFC1320). 52 MD4 message digest algorithm (RFC1320).
30 53
31config CRYPTO_MD5 54config CRYPTO_MD5
32 tristate "MD5 digest algorithm" 55 tristate "MD5 digest algorithm"
33 depends on CRYPTO 56 select CRYPTO_ALGAPI
34 help 57 help
35 MD5 message digest algorithm (RFC1321). 58 MD5 message digest algorithm (RFC1321).
36 59
37config CRYPTO_SHA1 60config CRYPTO_SHA1
38 tristate "SHA1 digest algorithm" 61 tristate "SHA1 digest algorithm"
39 depends on CRYPTO 62 select CRYPTO_ALGAPI
40 help 63 help
41 SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2). 64 SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2).
42 65
43config CRYPTO_SHA1_S390 66config CRYPTO_SHA1_S390
44 tristate "SHA1 digest algorithm (s390)" 67 tristate "SHA1 digest algorithm (s390)"
45 depends on CRYPTO && S390 68 depends on S390
69 select CRYPTO_ALGAPI
46 help 70 help
47 This is the s390 hardware accelerated implementation of the 71 This is the s390 hardware accelerated implementation of the
48 SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2). 72 SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2).
49 73
50config CRYPTO_SHA256 74config CRYPTO_SHA256
51 tristate "SHA256 digest algorithm" 75 tristate "SHA256 digest algorithm"
52 depends on CRYPTO 76 select CRYPTO_ALGAPI
53 help 77 help
54 SHA256 secure hash standard (DFIPS 180-2). 78 SHA256 secure hash standard (DFIPS 180-2).
55 79
@@ -58,7 +82,8 @@ config CRYPTO_SHA256
58 82
59config CRYPTO_SHA256_S390 83config CRYPTO_SHA256_S390
60 tristate "SHA256 digest algorithm (s390)" 84 tristate "SHA256 digest algorithm (s390)"
61 depends on CRYPTO && S390 85 depends on S390
86 select CRYPTO_ALGAPI
62 help 87 help
63 This is the s390 hardware accelerated implementation of the 88 This is the s390 hardware accelerated implementation of the
64 SHA256 secure hash standard (DFIPS 180-2). 89 SHA256 secure hash standard (DFIPS 180-2).
@@ -68,7 +93,7 @@ config CRYPTO_SHA256_S390
68 93
69config CRYPTO_SHA512 94config CRYPTO_SHA512
70 tristate "SHA384 and SHA512 digest algorithms" 95 tristate "SHA384 and SHA512 digest algorithms"
71 depends on CRYPTO 96 select CRYPTO_ALGAPI
72 help 97 help
73 SHA512 secure hash standard (DFIPS 180-2). 98 SHA512 secure hash standard (DFIPS 180-2).
74 99
@@ -80,7 +105,7 @@ config CRYPTO_SHA512
80 105
81config CRYPTO_WP512 106config CRYPTO_WP512
82 tristate "Whirlpool digest algorithms" 107 tristate "Whirlpool digest algorithms"
83 depends on CRYPTO 108 select CRYPTO_ALGAPI
84 help 109 help
85 Whirlpool hash algorithm 512, 384 and 256-bit hashes 110 Whirlpool hash algorithm 512, 384 and 256-bit hashes
86 111
@@ -92,7 +117,7 @@ config CRYPTO_WP512
92 117
93config CRYPTO_TGR192 118config CRYPTO_TGR192
94 tristate "Tiger digest algorithms" 119 tristate "Tiger digest algorithms"
95 depends on CRYPTO 120 select CRYPTO_ALGAPI
96 help 121 help
97 Tiger hash algorithm 192, 160 and 128-bit hashes 122 Tiger hash algorithm 192, 160 and 128-bit hashes
98 123
@@ -103,21 +128,40 @@ config CRYPTO_TGR192
103 See also: 128 See also:
104 <http://www.cs.technion.ac.il/~biham/Reports/Tiger/>. 129 <http://www.cs.technion.ac.il/~biham/Reports/Tiger/>.
105 130
131config CRYPTO_ECB
132 tristate "ECB support"
133 select CRYPTO_BLKCIPHER
134 default m
135 help
136 ECB: Electronic CodeBook mode
137 This is the simplest block cipher algorithm. It simply encrypts
138 the input block by block.
139
140config CRYPTO_CBC
141 tristate "CBC support"
142 select CRYPTO_BLKCIPHER
143 default m
144 help
145 CBC: Cipher Block Chaining mode
146 This block cipher algorithm is required for IPSec.
147
106config CRYPTO_DES 148config CRYPTO_DES
107 tristate "DES and Triple DES EDE cipher algorithms" 149 tristate "DES and Triple DES EDE cipher algorithms"
108 depends on CRYPTO 150 select CRYPTO_ALGAPI
109 help 151 help
110 DES cipher algorithm (FIPS 46-2), and Triple DES EDE (FIPS 46-3). 152 DES cipher algorithm (FIPS 46-2), and Triple DES EDE (FIPS 46-3).
111 153
112config CRYPTO_DES_S390 154config CRYPTO_DES_S390
113 tristate "DES and Triple DES cipher algorithms (s390)" 155 tristate "DES and Triple DES cipher algorithms (s390)"
114 depends on CRYPTO && S390 156 depends on S390
157 select CRYPTO_ALGAPI
158 select CRYPTO_BLKCIPHER
115 help 159 help
116 DES cipher algorithm (FIPS 46-2), and Triple DES EDE (FIPS 46-3). 160 DES cipher algorithm (FIPS 46-2), and Triple DES EDE (FIPS 46-3).
117 161
118config CRYPTO_BLOWFISH 162config CRYPTO_BLOWFISH
119 tristate "Blowfish cipher algorithm" 163 tristate "Blowfish cipher algorithm"
120 depends on CRYPTO 164 select CRYPTO_ALGAPI
121 help 165 help
122 Blowfish cipher algorithm, by Bruce Schneier. 166 Blowfish cipher algorithm, by Bruce Schneier.
123 167
@@ -130,7 +174,8 @@ config CRYPTO_BLOWFISH
130 174
131config CRYPTO_TWOFISH 175config CRYPTO_TWOFISH
132 tristate "Twofish cipher algorithm" 176 tristate "Twofish cipher algorithm"
133 depends on CRYPTO 177 select CRYPTO_ALGAPI
178 select CRYPTO_TWOFISH_COMMON
134 help 179 help
135 Twofish cipher algorithm. 180 Twofish cipher algorithm.
136 181
@@ -142,9 +187,47 @@ config CRYPTO_TWOFISH
142 See also: 187 See also:
143 <http://www.schneier.com/twofish.html> 188 <http://www.schneier.com/twofish.html>
144 189
190config CRYPTO_TWOFISH_COMMON
191 tristate
192 help
193 Common parts of the Twofish cipher algorithm shared by the
194 generic c and the assembler implementations.
195
196config CRYPTO_TWOFISH_586
197 tristate "Twofish cipher algorithms (i586)"
198 depends on (X86 || UML_X86) && !64BIT
199 select CRYPTO_ALGAPI
200 select CRYPTO_TWOFISH_COMMON
201 help
202 Twofish cipher algorithm.
203
204 Twofish was submitted as an AES (Advanced Encryption Standard)
205 candidate cipher by researchers at CounterPane Systems. It is a
206 16 round block cipher supporting key sizes of 128, 192, and 256
207 bits.
208
209 See also:
210 <http://www.schneier.com/twofish.html>
211
212config CRYPTO_TWOFISH_X86_64
213 tristate "Twofish cipher algorithm (x86_64)"
214 depends on (X86 || UML_X86) && 64BIT
215 select CRYPTO_ALGAPI
216 select CRYPTO_TWOFISH_COMMON
217 help
218 Twofish cipher algorithm (x86_64).
219
220 Twofish was submitted as an AES (Advanced Encryption Standard)
221 candidate cipher by researchers at CounterPane Systems. It is a
222 16 round block cipher supporting key sizes of 128, 192, and 256
223 bits.
224
225 See also:
226 <http://www.schneier.com/twofish.html>
227
145config CRYPTO_SERPENT 228config CRYPTO_SERPENT
146 tristate "Serpent cipher algorithm" 229 tristate "Serpent cipher algorithm"
147 depends on CRYPTO 230 select CRYPTO_ALGAPI
148 help 231 help
149 Serpent cipher algorithm, by Anderson, Biham & Knudsen. 232 Serpent cipher algorithm, by Anderson, Biham & Knudsen.
150 233
@@ -157,7 +240,7 @@ config CRYPTO_SERPENT
157 240
158config CRYPTO_AES 241config CRYPTO_AES
159 tristate "AES cipher algorithms" 242 tristate "AES cipher algorithms"
160 depends on CRYPTO 243 select CRYPTO_ALGAPI
161 help 244 help
162 AES cipher algorithms (FIPS-197). AES uses the Rijndael 245 AES cipher algorithms (FIPS-197). AES uses the Rijndael
163 algorithm. 246 algorithm.
@@ -177,7 +260,8 @@ config CRYPTO_AES
177 260
178config CRYPTO_AES_586 261config CRYPTO_AES_586
179 tristate "AES cipher algorithms (i586)" 262 tristate "AES cipher algorithms (i586)"
180 depends on CRYPTO && ((X86 || UML_X86) && !64BIT) 263 depends on (X86 || UML_X86) && !64BIT
264 select CRYPTO_ALGAPI
181 help 265 help
182 AES cipher algorithms (FIPS-197). AES uses the Rijndael 266 AES cipher algorithms (FIPS-197). AES uses the Rijndael
183 algorithm. 267 algorithm.
@@ -197,7 +281,8 @@ config CRYPTO_AES_586
197 281
198config CRYPTO_AES_X86_64 282config CRYPTO_AES_X86_64
199 tristate "AES cipher algorithms (x86_64)" 283 tristate "AES cipher algorithms (x86_64)"
200 depends on CRYPTO && ((X86 || UML_X86) && 64BIT) 284 depends on (X86 || UML_X86) && 64BIT
285 select CRYPTO_ALGAPI
201 help 286 help
202 AES cipher algorithms (FIPS-197). AES uses the Rijndael 287 AES cipher algorithms (FIPS-197). AES uses the Rijndael
203 algorithm. 288 algorithm.
@@ -217,7 +302,9 @@ config CRYPTO_AES_X86_64
217 302
218config CRYPTO_AES_S390 303config CRYPTO_AES_S390
219 tristate "AES cipher algorithms (s390)" 304 tristate "AES cipher algorithms (s390)"
220 depends on CRYPTO && S390 305 depends on S390
306 select CRYPTO_ALGAPI
307 select CRYPTO_BLKCIPHER
221 help 308 help
222 This is the s390 hardware accelerated implementation of the 309 This is the s390 hardware accelerated implementation of the
223 AES cipher algorithms (FIPS-197). AES uses the Rijndael 310 AES cipher algorithms (FIPS-197). AES uses the Rijndael
@@ -237,21 +324,21 @@ config CRYPTO_AES_S390
237 324
238config CRYPTO_CAST5 325config CRYPTO_CAST5
239 tristate "CAST5 (CAST-128) cipher algorithm" 326 tristate "CAST5 (CAST-128) cipher algorithm"
240 depends on CRYPTO 327 select CRYPTO_ALGAPI
241 help 328 help
242 The CAST5 encryption algorithm (synonymous with CAST-128) is 329 The CAST5 encryption algorithm (synonymous with CAST-128) is
243 described in RFC2144. 330 described in RFC2144.
244 331
245config CRYPTO_CAST6 332config CRYPTO_CAST6
246 tristate "CAST6 (CAST-256) cipher algorithm" 333 tristate "CAST6 (CAST-256) cipher algorithm"
247 depends on CRYPTO 334 select CRYPTO_ALGAPI
248 help 335 help
249 The CAST6 encryption algorithm (synonymous with CAST-256) is 336 The CAST6 encryption algorithm (synonymous with CAST-256) is
250 described in RFC2612. 337 described in RFC2612.
251 338
252config CRYPTO_TEA 339config CRYPTO_TEA
253 tristate "TEA, XTEA and XETA cipher algorithms" 340 tristate "TEA, XTEA and XETA cipher algorithms"
254 depends on CRYPTO 341 select CRYPTO_ALGAPI
255 help 342 help
256 TEA cipher algorithm. 343 TEA cipher algorithm.
257 344
@@ -268,7 +355,7 @@ config CRYPTO_TEA
268 355
269config CRYPTO_ARC4 356config CRYPTO_ARC4
270 tristate "ARC4 cipher algorithm" 357 tristate "ARC4 cipher algorithm"
271 depends on CRYPTO 358 select CRYPTO_ALGAPI
272 help 359 help
273 ARC4 cipher algorithm. 360 ARC4 cipher algorithm.
274 361
@@ -279,7 +366,7 @@ config CRYPTO_ARC4
279 366
280config CRYPTO_KHAZAD 367config CRYPTO_KHAZAD
281 tristate "Khazad cipher algorithm" 368 tristate "Khazad cipher algorithm"
282 depends on CRYPTO 369 select CRYPTO_ALGAPI
283 help 370 help
284 Khazad cipher algorithm. 371 Khazad cipher algorithm.
285 372
@@ -292,7 +379,7 @@ config CRYPTO_KHAZAD
292 379
293config CRYPTO_ANUBIS 380config CRYPTO_ANUBIS
294 tristate "Anubis cipher algorithm" 381 tristate "Anubis cipher algorithm"
295 depends on CRYPTO 382 select CRYPTO_ALGAPI
296 help 383 help
297 Anubis cipher algorithm. 384 Anubis cipher algorithm.
298 385
@@ -307,7 +394,7 @@ config CRYPTO_ANUBIS
307 394
308config CRYPTO_DEFLATE 395config CRYPTO_DEFLATE
309 tristate "Deflate compression algorithm" 396 tristate "Deflate compression algorithm"
310 depends on CRYPTO 397 select CRYPTO_ALGAPI
311 select ZLIB_INFLATE 398 select ZLIB_INFLATE
312 select ZLIB_DEFLATE 399 select ZLIB_DEFLATE
313 help 400 help
@@ -318,7 +405,7 @@ config CRYPTO_DEFLATE
318 405
319config CRYPTO_MICHAEL_MIC 406config CRYPTO_MICHAEL_MIC
320 tristate "Michael MIC keyed digest algorithm" 407 tristate "Michael MIC keyed digest algorithm"
321 depends on CRYPTO 408 select CRYPTO_ALGAPI
322 help 409 help
323 Michael MIC is used for message integrity protection in TKIP 410 Michael MIC is used for message integrity protection in TKIP
324 (IEEE 802.11i). This algorithm is required for TKIP, but it 411 (IEEE 802.11i). This algorithm is required for TKIP, but it
@@ -327,7 +414,7 @@ config CRYPTO_MICHAEL_MIC
327 414
328config CRYPTO_CRC32C 415config CRYPTO_CRC32C
329 tristate "CRC32c CRC algorithm" 416 tristate "CRC32c CRC algorithm"
330 depends on CRYPTO 417 select CRYPTO_ALGAPI
331 select LIBCRC32C 418 select LIBCRC32C
332 help 419 help
333 Castagnoli, et al Cyclic Redundancy-Check Algorithm. Used 420 Castagnoli, et al Cyclic Redundancy-Check Algorithm. Used
@@ -337,10 +424,13 @@ config CRYPTO_CRC32C
337 424
338config CRYPTO_TEST 425config CRYPTO_TEST
339 tristate "Testing module" 426 tristate "Testing module"
340 depends on CRYPTO && m 427 depends on m
428 select CRYPTO_ALGAPI
341 help 429 help
342 Quick & dirty crypto test module. 430 Quick & dirty crypto test module.
343 431
344source "drivers/crypto/Kconfig" 432source "drivers/crypto/Kconfig"
345endmenu
346 433
434endif # if CRYPTO
435
436endmenu
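
[Note] Besides moving every entry from "depends on CRYPTO" to "select CRYPTO_ALGAPI" inside a single "if CRYPTO" block, the new CRYPTO_MANAGER, CRYPTO_ECB and CRYPTO_CBC options turn modes of operation into separate template modules: an instance such as cbc(aes) is constructed on demand by cryptomgr instead of being hard-wired into each cipher. A rough caller-side sketch of what that enables is below; crypto_alloc_base() is added later in this patch (crypto/api.c), and the CRYPTO_ALG_TYPE_BLKCIPHER/CRYPTO_ALG_TYPE_MASK constants come from the matching header changes outside this diff, so treat the details as assumptions.

#include <linux/crypto.h>
#include <linux/err.h>

/* With CRYPTO_MANAGER and CRYPTO_CBC enabled (and the aes cipher available),
 * looking up "cbc(aes)" makes cryptomgr instantiate the cbc template around
 * aes on the fly if no such algorithm is registered yet. */
static struct crypto_tfm *example_get_cbc_aes(void)
{
	return crypto_alloc_base("cbc(aes)", CRYPTO_ALG_TYPE_BLKCIPHER,
				 CRYPTO_ALG_TYPE_MASK);
}

On failure the return value is an ERR_PTR(); a successful transform is released with crypto_free_tfm().
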
diff --git a/crypto/Makefile b/crypto/Makefile
index d287b9e60c47..72366208e291 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -2,11 +2,18 @@
2# Cryptographic API 2# Cryptographic API
3# 3#
4 4
5proc-crypto-$(CONFIG_PROC_FS) = proc.o 5obj-$(CONFIG_CRYPTO) += api.o scatterwalk.o cipher.o digest.o compress.o
6 6
7obj-$(CONFIG_CRYPTO) += api.o scatterwalk.o cipher.o digest.o compress.o \ 7crypto_algapi-$(CONFIG_PROC_FS) += proc.o
8 $(proc-crypto-y) 8crypto_algapi-objs := algapi.o $(crypto_algapi-y)
9obj-$(CONFIG_CRYPTO_ALGAPI) += crypto_algapi.o
9 10
11obj-$(CONFIG_CRYPTO_BLKCIPHER) += blkcipher.o
12
13crypto_hash-objs := hash.o
14obj-$(CONFIG_CRYPTO_HASH) += crypto_hash.o
15
16obj-$(CONFIG_CRYPTO_MANAGER) += cryptomgr.o
10obj-$(CONFIG_CRYPTO_HMAC) += hmac.o 17obj-$(CONFIG_CRYPTO_HMAC) += hmac.o
11obj-$(CONFIG_CRYPTO_NULL) += crypto_null.o 18obj-$(CONFIG_CRYPTO_NULL) += crypto_null.o
12obj-$(CONFIG_CRYPTO_MD4) += md4.o 19obj-$(CONFIG_CRYPTO_MD4) += md4.o
@@ -16,9 +23,12 @@ obj-$(CONFIG_CRYPTO_SHA256) += sha256.o
16obj-$(CONFIG_CRYPTO_SHA512) += sha512.o 23obj-$(CONFIG_CRYPTO_SHA512) += sha512.o
17obj-$(CONFIG_CRYPTO_WP512) += wp512.o 24obj-$(CONFIG_CRYPTO_WP512) += wp512.o
18obj-$(CONFIG_CRYPTO_TGR192) += tgr192.o 25obj-$(CONFIG_CRYPTO_TGR192) += tgr192.o
26obj-$(CONFIG_CRYPTO_ECB) += ecb.o
27obj-$(CONFIG_CRYPTO_CBC) += cbc.o
19obj-$(CONFIG_CRYPTO_DES) += des.o 28obj-$(CONFIG_CRYPTO_DES) += des.o
20obj-$(CONFIG_CRYPTO_BLOWFISH) += blowfish.o 29obj-$(CONFIG_CRYPTO_BLOWFISH) += blowfish.o
21obj-$(CONFIG_CRYPTO_TWOFISH) += twofish.o 30obj-$(CONFIG_CRYPTO_TWOFISH) += twofish.o
31obj-$(CONFIG_CRYPTO_TWOFISH_COMMON) += twofish_common.o
22obj-$(CONFIG_CRYPTO_SERPENT) += serpent.o 32obj-$(CONFIG_CRYPTO_SERPENT) += serpent.o
23obj-$(CONFIG_CRYPTO_AES) += aes.o 33obj-$(CONFIG_CRYPTO_AES) += aes.o
24obj-$(CONFIG_CRYPTO_CAST5) += cast5.o 34obj-$(CONFIG_CRYPTO_CAST5) += cast5.o
diff --git a/crypto/aes.c b/crypto/aes.c
index a038711831e7..e2440773878c 100644
--- a/crypto/aes.c
+++ b/crypto/aes.c
@@ -249,13 +249,14 @@ gen_tabs (void)
249} 249}
250 250
251static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, 251static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
252 unsigned int key_len, u32 *flags) 252 unsigned int key_len)
253{ 253{
254 struct aes_ctx *ctx = crypto_tfm_ctx(tfm); 254 struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
255 const __le32 *key = (const __le32 *)in_key; 255 const __le32 *key = (const __le32 *)in_key;
256 u32 *flags = &tfm->crt_flags;
256 u32 i, t, u, v, w; 257 u32 i, t, u, v, w;
257 258
258 if (key_len != 16 && key_len != 24 && key_len != 32) { 259 if (key_len % 8) {
259 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; 260 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
260 return -EINVAL; 261 return -EINVAL;
261 } 262 }
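
[Note] The prototype change above is applied to every setkey() implementation in this series: the u32 *flags out-parameter is removed and ciphers report bad keys through tfm->crt_flags instead. A minimal sketch of the new convention follows; example_ctx, the 16-byte-only key policy and the function name are illustrative, not part of the patch.

#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/string.h>

struct example_ctx {
	u8 key[16];
};

/* New-style setkey: no u32 *flags argument; errors go to tfm->crt_flags. */
static int example_setkey(struct crypto_tfm *tfm, const u8 *in_key,
			  unsigned int key_len)
{
	struct example_ctx *ctx = crypto_tfm_ctx(tfm);

	if (key_len != sizeof(ctx->key)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	memcpy(ctx->key, in_key, key_len);
	return 0;
}
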
diff --git a/crypto/algapi.c b/crypto/algapi.c
new file mode 100644
index 000000000000..c91530021e9c
--- /dev/null
+++ b/crypto/algapi.c
@@ -0,0 +1,486 @@
1/*
2 * Cryptographic API for algorithms (i.e., low-level API).
3 *
4 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License as published by the Free
8 * Software Foundation; either version 2 of the License, or (at your option)
9 * any later version.
10 *
11 */
12
13#include <linux/err.h>
14#include <linux/errno.h>
15#include <linux/init.h>
16#include <linux/kernel.h>
17#include <linux/list.h>
18#include <linux/module.h>
19#include <linux/rtnetlink.h>
20#include <linux/string.h>
21
22#include "internal.h"
23
24static LIST_HEAD(crypto_template_list);
25
26void crypto_larval_error(const char *name, u32 type, u32 mask)
27{
28 struct crypto_alg *alg;
29
30 down_read(&crypto_alg_sem);
31 alg = __crypto_alg_lookup(name, type, mask);
32 up_read(&crypto_alg_sem);
33
34 if (alg) {
35 if (crypto_is_larval(alg)) {
36 struct crypto_larval *larval = (void *)alg;
37 complete(&larval->completion);
38 }
39 crypto_mod_put(alg);
40 }
41}
42EXPORT_SYMBOL_GPL(crypto_larval_error);
43
44static inline int crypto_set_driver_name(struct crypto_alg *alg)
45{
46 static const char suffix[] = "-generic";
47 char *driver_name = alg->cra_driver_name;
48 int len;
49
50 if (*driver_name)
51 return 0;
52
53 len = strlcpy(driver_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
54 if (len + sizeof(suffix) > CRYPTO_MAX_ALG_NAME)
55 return -ENAMETOOLONG;
56
57 memcpy(driver_name + len, suffix, sizeof(suffix));
58 return 0;
59}
60
61static int crypto_check_alg(struct crypto_alg *alg)
62{
63 if (alg->cra_alignmask & (alg->cra_alignmask + 1))
64 return -EINVAL;
65
66 if (alg->cra_alignmask & alg->cra_blocksize)
67 return -EINVAL;
68
69 if (alg->cra_blocksize > PAGE_SIZE / 8)
70 return -EINVAL;
71
72 if (alg->cra_priority < 0)
73 return -EINVAL;
74
75 return crypto_set_driver_name(alg);
76}
77
78static void crypto_destroy_instance(struct crypto_alg *alg)
79{
80 struct crypto_instance *inst = (void *)alg;
81 struct crypto_template *tmpl = inst->tmpl;
82
83 tmpl->free(inst);
84 crypto_tmpl_put(tmpl);
85}
86
87static void crypto_remove_spawns(struct list_head *spawns,
88 struct list_head *list)
89{
90 struct crypto_spawn *spawn, *n;
91
92 list_for_each_entry_safe(spawn, n, spawns, list) {
93 struct crypto_instance *inst = spawn->inst;
94 struct crypto_template *tmpl = inst->tmpl;
95
96 list_del_init(&spawn->list);
97 spawn->alg = NULL;
98
99 if (crypto_is_dead(&inst->alg))
100 continue;
101
102 inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
103 if (!tmpl || !crypto_tmpl_get(tmpl))
104 continue;
105
106 crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, &inst->alg);
107 list_move(&inst->alg.cra_list, list);
108 hlist_del(&inst->list);
109 inst->alg.cra_destroy = crypto_destroy_instance;
110
111 if (!list_empty(&inst->alg.cra_users)) {
112 if (&n->list == spawns)
113 n = list_entry(inst->alg.cra_users.next,
114 typeof(*n), list);
115 __list_splice(&inst->alg.cra_users, spawns->prev);
116 }
117 }
118}
119
120static int __crypto_register_alg(struct crypto_alg *alg,
121 struct list_head *list)
122{
123 struct crypto_alg *q;
124 int ret = -EAGAIN;
125
126 if (crypto_is_dead(alg))
127 goto out;
128
129 INIT_LIST_HEAD(&alg->cra_users);
130
131 ret = -EEXIST;
132
133 atomic_set(&alg->cra_refcnt, 1);
134 list_for_each_entry(q, &crypto_alg_list, cra_list) {
135 if (q == alg)
136 goto out;
137
138 if (crypto_is_moribund(q))
139 continue;
140
141 if (crypto_is_larval(q)) {
142 struct crypto_larval *larval = (void *)q;
143
144 if (strcmp(alg->cra_name, q->cra_name) &&
145 strcmp(alg->cra_driver_name, q->cra_name))
146 continue;
147
148 if (larval->adult)
149 continue;
150 if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
151 continue;
152 if (!crypto_mod_get(alg))
153 continue;
154
155 larval->adult = alg;
156 complete(&larval->completion);
157 continue;
158 }
159
160 if (strcmp(alg->cra_name, q->cra_name))
161 continue;
162
163 if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
164 q->cra_priority > alg->cra_priority)
165 continue;
166
167 crypto_remove_spawns(&q->cra_users, list);
168 }
169
170 list_add(&alg->cra_list, &crypto_alg_list);
171
172 crypto_notify(CRYPTO_MSG_ALG_REGISTER, alg);
173 ret = 0;
174
175out:
176 return ret;
177}
178
179static void crypto_remove_final(struct list_head *list)
180{
181 struct crypto_alg *alg;
182 struct crypto_alg *n;
183
184 list_for_each_entry_safe(alg, n, list, cra_list) {
185 list_del_init(&alg->cra_list);
186 crypto_alg_put(alg);
187 }
188}
189
190int crypto_register_alg(struct crypto_alg *alg)
191{
192 LIST_HEAD(list);
193 int err;
194
195 err = crypto_check_alg(alg);
196 if (err)
197 return err;
198
199 down_write(&crypto_alg_sem);
200 err = __crypto_register_alg(alg, &list);
201 up_write(&crypto_alg_sem);
202
203 crypto_remove_final(&list);
204 return err;
205}
206EXPORT_SYMBOL_GPL(crypto_register_alg);
207
208static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
209{
210 if (unlikely(list_empty(&alg->cra_list)))
211 return -ENOENT;
212
213 alg->cra_flags |= CRYPTO_ALG_DEAD;
214
215 crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, alg);
216 list_del_init(&alg->cra_list);
217 crypto_remove_spawns(&alg->cra_users, list);
218
219 return 0;
220}
221
222int crypto_unregister_alg(struct crypto_alg *alg)
223{
224 int ret;
225 LIST_HEAD(list);
226
227 down_write(&crypto_alg_sem);
228 ret = crypto_remove_alg(alg, &list);
229 up_write(&crypto_alg_sem);
230
231 if (ret)
232 return ret;
233
234 BUG_ON(atomic_read(&alg->cra_refcnt) != 1);
235 if (alg->cra_destroy)
236 alg->cra_destroy(alg);
237
238 crypto_remove_final(&list);
239 return 0;
240}
241EXPORT_SYMBOL_GPL(crypto_unregister_alg);
242
243int crypto_register_template(struct crypto_template *tmpl)
244{
245 struct crypto_template *q;
246 int err = -EEXIST;
247
248 down_write(&crypto_alg_sem);
249
250 list_for_each_entry(q, &crypto_template_list, list) {
251 if (q == tmpl)
252 goto out;
253 }
254
255 list_add(&tmpl->list, &crypto_template_list);
256 crypto_notify(CRYPTO_MSG_TMPL_REGISTER, tmpl);
257 err = 0;
258out:
259 up_write(&crypto_alg_sem);
260 return err;
261}
262EXPORT_SYMBOL_GPL(crypto_register_template);
263
264void crypto_unregister_template(struct crypto_template *tmpl)
265{
266 struct crypto_instance *inst;
267 struct hlist_node *p, *n;
268 struct hlist_head *list;
269 LIST_HEAD(users);
270
271 down_write(&crypto_alg_sem);
272
273 BUG_ON(list_empty(&tmpl->list));
274 list_del_init(&tmpl->list);
275
276 list = &tmpl->instances;
277 hlist_for_each_entry(inst, p, list, list) {
278 int err = crypto_remove_alg(&inst->alg, &users);
279 BUG_ON(err);
280 }
281
282 crypto_notify(CRYPTO_MSG_TMPL_UNREGISTER, tmpl);
283
284 up_write(&crypto_alg_sem);
285
286 hlist_for_each_entry_safe(inst, p, n, list, list) {
287 BUG_ON(atomic_read(&inst->alg.cra_refcnt) != 1);
288 tmpl->free(inst);
289 }
290 crypto_remove_final(&users);
291}
292EXPORT_SYMBOL_GPL(crypto_unregister_template);
293
294static struct crypto_template *__crypto_lookup_template(const char *name)
295{
296 struct crypto_template *q, *tmpl = NULL;
297
298 down_read(&crypto_alg_sem);
299 list_for_each_entry(q, &crypto_template_list, list) {
300 if (strcmp(q->name, name))
301 continue;
302 if (unlikely(!crypto_tmpl_get(q)))
303 continue;
304
305 tmpl = q;
306 break;
307 }
308 up_read(&crypto_alg_sem);
309
310 return tmpl;
311}
312
313struct crypto_template *crypto_lookup_template(const char *name)
314{
315 return try_then_request_module(__crypto_lookup_template(name), name);
316}
317EXPORT_SYMBOL_GPL(crypto_lookup_template);
318
319int crypto_register_instance(struct crypto_template *tmpl,
320 struct crypto_instance *inst)
321{
322 LIST_HEAD(list);
323 int err = -EINVAL;
324
325 if (inst->alg.cra_destroy)
326 goto err;
327
328 err = crypto_check_alg(&inst->alg);
329 if (err)
330 goto err;
331
332 inst->alg.cra_module = tmpl->module;
333
334 down_write(&crypto_alg_sem);
335
336 err = __crypto_register_alg(&inst->alg, &list);
337 if (err)
338 goto unlock;
339
340 hlist_add_head(&inst->list, &tmpl->instances);
341 inst->tmpl = tmpl;
342
343unlock:
344 up_write(&crypto_alg_sem);
345
346 crypto_remove_final(&list);
347
348err:
349 return err;
350}
351EXPORT_SYMBOL_GPL(crypto_register_instance);
352
353int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
354 struct crypto_instance *inst)
355{
356 int err = -EAGAIN;
357
358 spawn->inst = inst;
359
360 down_write(&crypto_alg_sem);
361 if (!crypto_is_moribund(alg)) {
362 list_add(&spawn->list, &alg->cra_users);
363 spawn->alg = alg;
364 err = 0;
365 }
366 up_write(&crypto_alg_sem);
367
368 return err;
369}
370EXPORT_SYMBOL_GPL(crypto_init_spawn);
371
372void crypto_drop_spawn(struct crypto_spawn *spawn)
373{
374 down_write(&crypto_alg_sem);
375 list_del(&spawn->list);
376 up_write(&crypto_alg_sem);
377}
378EXPORT_SYMBOL_GPL(crypto_drop_spawn);
379
380struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn)
381{
382 struct crypto_alg *alg;
383 struct crypto_alg *alg2;
384 struct crypto_tfm *tfm;
385
386 down_read(&crypto_alg_sem);
387 alg = spawn->alg;
388 alg2 = alg;
389 if (alg2)
390 alg2 = crypto_mod_get(alg2);
391 up_read(&crypto_alg_sem);
392
393 if (!alg2) {
394 if (alg)
395 crypto_shoot_alg(alg);
396 return ERR_PTR(-EAGAIN);
397 }
398
399 tfm = __crypto_alloc_tfm(alg, 0);
400 if (IS_ERR(tfm))
401 crypto_mod_put(alg);
402
403 return tfm;
404}
405EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
406
407int crypto_register_notifier(struct notifier_block *nb)
408{
409 return blocking_notifier_chain_register(&crypto_chain, nb);
410}
411EXPORT_SYMBOL_GPL(crypto_register_notifier);
412
413int crypto_unregister_notifier(struct notifier_block *nb)
414{
415 return blocking_notifier_chain_unregister(&crypto_chain, nb);
416}
417EXPORT_SYMBOL_GPL(crypto_unregister_notifier);
418
419struct crypto_alg *crypto_get_attr_alg(void *param, unsigned int len,
420 u32 type, u32 mask)
421{
422 struct rtattr *rta = param;
423 struct crypto_attr_alg *alga;
424
425 if (!RTA_OK(rta, len))
426 return ERR_PTR(-EBADR);
427 if (rta->rta_type != CRYPTOA_ALG || RTA_PAYLOAD(rta) < sizeof(*alga))
428 return ERR_PTR(-EINVAL);
429
430 alga = RTA_DATA(rta);
431 alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;
432
433 return crypto_alg_mod_lookup(alga->name, type, mask);
434}
435EXPORT_SYMBOL_GPL(crypto_get_attr_alg);
436
437struct crypto_instance *crypto_alloc_instance(const char *name,
438 struct crypto_alg *alg)
439{
440 struct crypto_instance *inst;
441 struct crypto_spawn *spawn;
442 int err;
443
444 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
445 if (!inst)
446 return ERR_PTR(-ENOMEM);
447
448 err = -ENAMETOOLONG;
449 if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
450 alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
451 goto err_free_inst;
452
453 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
454 name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
455 goto err_free_inst;
456
457 spawn = crypto_instance_ctx(inst);
458 err = crypto_init_spawn(spawn, alg, inst);
459
460 if (err)
461 goto err_free_inst;
462
463 return inst;
464
465err_free_inst:
466 kfree(inst);
467 return ERR_PTR(err);
468}
469EXPORT_SYMBOL_GPL(crypto_alloc_instance);
470
471static int __init crypto_algapi_init(void)
472{
473 crypto_init_proc();
474 return 0;
475}
476
477static void __exit crypto_algapi_exit(void)
478{
479 crypto_exit_proc();
480}
481
482module_init(crypto_algapi_init);
483module_exit(crypto_algapi_exit);
484
485MODULE_LICENSE("GPL");
486MODULE_DESCRIPTION("Cryptographic algorithms API");
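
[Note] algapi.c becomes the registration backend that cipher modules build against: crypto_check_alg() validates the descriptor, and crypto_register_alg()/crypto_unregister_alg() wire it into (and out of) crypto_alg_list while resolving any waiting larvals. A toy provider module against this interface might look like the sketch below; the xor16 "cipher" has no cryptographic value and every name in it is hypothetical, with the struct layout following the idiom used by the in-tree ciphers of this era.

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/string.h>

struct xor16_ctx {
	u8 key[16];
};

static int xor16_setkey(struct crypto_tfm *tfm, const u8 *in_key,
			unsigned int key_len)
{
	struct xor16_ctx *ctx = crypto_tfm_ctx(tfm);

	if (key_len != sizeof(ctx->key)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	memcpy(ctx->key, in_key, key_len);
	return 0;
}

/* "Encryption" by XOR with the key: a placeholder, not a real cipher. */
static void xor16_crypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct xor16_ctx *ctx = crypto_tfm_ctx(tfm);
	int i;

	for (i = 0; i < 16; i++)
		dst[i] = src[i] ^ ctx->key[i];
}

static struct crypto_alg xor16_alg = {
	.cra_name	= "xor16",
	.cra_flags	= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize	= 16,
	.cra_ctxsize	= sizeof(struct xor16_ctx),
	.cra_module	= THIS_MODULE,
	.cra_list	= LIST_HEAD_INIT(xor16_alg.cra_list),
	.cra_u		= { .cipher = {
		.cia_min_keysize = 16,
		.cia_max_keysize = 16,
		.cia_setkey	 = xor16_setkey,
		.cia_encrypt	 = xor16_crypt,
		.cia_decrypt	 = xor16_crypt } }
};

static int __init xor16_init(void)
{
	/* crypto_check_alg() validates alignmask, blocksize and priority,
	 * and fills in a "-generic" cra_driver_name if none was given. */
	return crypto_register_alg(&xor16_alg);
}

static void __exit xor16_exit(void)
{
	crypto_unregister_alg(&xor16_alg);
}

module_init(xor16_init);
module_exit(xor16_exit);
MODULE_LICENSE("GPL");
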
diff --git a/crypto/anubis.c b/crypto/anubis.c
index 7e2e1a29800e..1c771f7f4dc5 100644
--- a/crypto/anubis.c
+++ b/crypto/anubis.c
@@ -461,10 +461,11 @@ static const u32 rc[] = {
461}; 461};
462 462
463static int anubis_setkey(struct crypto_tfm *tfm, const u8 *in_key, 463static int anubis_setkey(struct crypto_tfm *tfm, const u8 *in_key,
464 unsigned int key_len, u32 *flags) 464 unsigned int key_len)
465{ 465{
466 struct anubis_ctx *ctx = crypto_tfm_ctx(tfm); 466 struct anubis_ctx *ctx = crypto_tfm_ctx(tfm);
467 const __be32 *key = (const __be32 *)in_key; 467 const __be32 *key = (const __be32 *)in_key;
468 u32 *flags = &tfm->crt_flags;
468 int N, R, i, r; 469 int N, R, i, r;
469 u32 kappa[ANUBIS_MAX_N]; 470 u32 kappa[ANUBIS_MAX_N];
470 u32 inter[ANUBIS_MAX_N]; 471 u32 inter[ANUBIS_MAX_N];
diff --git a/crypto/api.c b/crypto/api.c
index c11ec1fd4f18..2e84d4b54790 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -15,70 +15,202 @@
15 * 15 *
16 */ 16 */
17 17
18#include <linux/compiler.h> 18#include <linux/err.h>
19#include <linux/init.h>
20#include <linux/crypto.h>
21#include <linux/errno.h> 19#include <linux/errno.h>
22#include <linux/kernel.h> 20#include <linux/kernel.h>
23#include <linux/kmod.h> 21#include <linux/kmod.h>
24#include <linux/rwsem.h> 22#include <linux/module.h>
23#include <linux/param.h>
24#include <linux/sched.h>
25#include <linux/slab.h> 25#include <linux/slab.h>
26#include <linux/string.h> 26#include <linux/string.h>
27#include "internal.h" 27#include "internal.h"
28 28
29LIST_HEAD(crypto_alg_list); 29LIST_HEAD(crypto_alg_list);
30EXPORT_SYMBOL_GPL(crypto_alg_list);
30DECLARE_RWSEM(crypto_alg_sem); 31DECLARE_RWSEM(crypto_alg_sem);
32EXPORT_SYMBOL_GPL(crypto_alg_sem);
31 33
32static inline int crypto_alg_get(struct crypto_alg *alg) 34BLOCKING_NOTIFIER_HEAD(crypto_chain);
35EXPORT_SYMBOL_GPL(crypto_chain);
36
37static inline struct crypto_alg *crypto_alg_get(struct crypto_alg *alg)
38{
39 atomic_inc(&alg->cra_refcnt);
40 return alg;
41}
42
43struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
33{ 44{
34 return try_module_get(alg->cra_module); 45 return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
35} 46}
47EXPORT_SYMBOL_GPL(crypto_mod_get);
36 48
37static inline void crypto_alg_put(struct crypto_alg *alg) 49void crypto_mod_put(struct crypto_alg *alg)
38{ 50{
51 crypto_alg_put(alg);
39 module_put(alg->cra_module); 52 module_put(alg->cra_module);
40} 53}
54EXPORT_SYMBOL_GPL(crypto_mod_put);
41 55
42static struct crypto_alg *crypto_alg_lookup(const char *name) 56struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, u32 mask)
43{ 57{
44 struct crypto_alg *q, *alg = NULL; 58 struct crypto_alg *q, *alg = NULL;
45 int best = -1; 59 int best = -2;
46 60
47 if (!name)
48 return NULL;
49
50 down_read(&crypto_alg_sem);
51
52 list_for_each_entry(q, &crypto_alg_list, cra_list) { 61 list_for_each_entry(q, &crypto_alg_list, cra_list) {
53 int exact, fuzzy; 62 int exact, fuzzy;
54 63
64 if (crypto_is_moribund(q))
65 continue;
66
67 if ((q->cra_flags ^ type) & mask)
68 continue;
69
70 if (crypto_is_larval(q) &&
71 ((struct crypto_larval *)q)->mask != mask)
72 continue;
73
55 exact = !strcmp(q->cra_driver_name, name); 74 exact = !strcmp(q->cra_driver_name, name);
56 fuzzy = !strcmp(q->cra_name, name); 75 fuzzy = !strcmp(q->cra_name, name);
57 if (!exact && !(fuzzy && q->cra_priority > best)) 76 if (!exact && !(fuzzy && q->cra_priority > best))
58 continue; 77 continue;
59 78
60 if (unlikely(!crypto_alg_get(q))) 79 if (unlikely(!crypto_mod_get(q)))
61 continue; 80 continue;
62 81
63 best = q->cra_priority; 82 best = q->cra_priority;
64 if (alg) 83 if (alg)
65 crypto_alg_put(alg); 84 crypto_mod_put(alg);
66 alg = q; 85 alg = q;
67 86
68 if (exact) 87 if (exact)
69 break; 88 break;
70 } 89 }
71 90
91 return alg;
92}
93EXPORT_SYMBOL_GPL(__crypto_alg_lookup);
94
95static void crypto_larval_destroy(struct crypto_alg *alg)
96{
97 struct crypto_larval *larval = (void *)alg;
98
99 BUG_ON(!crypto_is_larval(alg));
100 if (larval->adult)
101 crypto_mod_put(larval->adult);
102 kfree(larval);
103}
104
105static struct crypto_alg *crypto_larval_alloc(const char *name, u32 type,
106 u32 mask)
107{
108 struct crypto_alg *alg;
109 struct crypto_larval *larval;
110
111 larval = kzalloc(sizeof(*larval), GFP_KERNEL);
112 if (!larval)
113 return ERR_PTR(-ENOMEM);
114
115 larval->mask = mask;
116 larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
117 larval->alg.cra_priority = -1;
118 larval->alg.cra_destroy = crypto_larval_destroy;
119
120 atomic_set(&larval->alg.cra_refcnt, 2);
121 strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
122 init_completion(&larval->completion);
123
124 down_write(&crypto_alg_sem);
125 alg = __crypto_alg_lookup(name, type, mask);
126 if (!alg) {
127 alg = &larval->alg;
128 list_add(&alg->cra_list, &crypto_alg_list);
129 }
130 up_write(&crypto_alg_sem);
131
132 if (alg != &larval->alg)
133 kfree(larval);
134
135 return alg;
136}
137
138static void crypto_larval_kill(struct crypto_alg *alg)
139{
140 struct crypto_larval *larval = (void *)alg;
141
142 down_write(&crypto_alg_sem);
143 list_del(&alg->cra_list);
144 up_write(&crypto_alg_sem);
145 complete(&larval->completion);
146 crypto_alg_put(alg);
147}
148
149static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
150{
151 struct crypto_larval *larval = (void *)alg;
152
153 wait_for_completion_interruptible_timeout(&larval->completion, 60 * HZ);
154 alg = larval->adult;
155 if (alg) {
156 if (!crypto_mod_get(alg))
157 alg = ERR_PTR(-EAGAIN);
158 } else
159 alg = ERR_PTR(-ENOENT);
160 crypto_mod_put(&larval->alg);
161
162 return alg;
163}
164
165static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
166 u32 mask)
167{
168 struct crypto_alg *alg;
169
170 down_read(&crypto_alg_sem);
171 alg = __crypto_alg_lookup(name, type, mask);
72 up_read(&crypto_alg_sem); 172 up_read(&crypto_alg_sem);
173
73 return alg; 174 return alg;
74} 175}
75 176
76/* A far more intelligent version of this is planned. For now, just 177struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
77 * try an exact match on the name of the algorithm. */
78static inline struct crypto_alg *crypto_alg_mod_lookup(const char *name)
79{ 178{
80 return try_then_request_module(crypto_alg_lookup(name), name); 179 struct crypto_alg *alg;
180 struct crypto_alg *larval;
181 int ok;
182
183 if (!name)
184 return ERR_PTR(-ENOENT);
185
186 mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
187 type &= mask;
188
189 alg = try_then_request_module(crypto_alg_lookup(name, type, mask),
190 name);
191 if (alg)
192 return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;
193
194 larval = crypto_larval_alloc(name, type, mask);
195 if (IS_ERR(larval) || !crypto_is_larval(larval))
196 return larval;
197
198 ok = crypto_notify(CRYPTO_MSG_ALG_REQUEST, larval);
199 if (ok == NOTIFY_DONE) {
200 request_module("cryptomgr");
201 ok = crypto_notify(CRYPTO_MSG_ALG_REQUEST, larval);
202 }
203
204 if (ok == NOTIFY_STOP)
205 alg = crypto_larval_wait(larval);
206 else {
207 crypto_mod_put(larval);
208 alg = ERR_PTR(-ENOENT);
209 }
210 crypto_larval_kill(larval);
211 return alg;
81} 212}
213EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
82 214
83static int crypto_init_flags(struct crypto_tfm *tfm, u32 flags) 215static int crypto_init_flags(struct crypto_tfm *tfm, u32 flags)
84{ 216{
@@ -94,17 +226,18 @@ static int crypto_init_flags(struct crypto_tfm *tfm, u32 flags)
94 226
95 case CRYPTO_ALG_TYPE_COMPRESS: 227 case CRYPTO_ALG_TYPE_COMPRESS:
96 return crypto_init_compress_flags(tfm, flags); 228 return crypto_init_compress_flags(tfm, flags);
97
98 default:
99 break;
100 } 229 }
101 230
102 BUG(); 231 return 0;
103 return -EINVAL;
104} 232}
105 233
106static int crypto_init_ops(struct crypto_tfm *tfm) 234static int crypto_init_ops(struct crypto_tfm *tfm)
107{ 235{
236 const struct crypto_type *type = tfm->__crt_alg->cra_type;
237
238 if (type)
239 return type->init(tfm);
240
108 switch (crypto_tfm_alg_type(tfm)) { 241 switch (crypto_tfm_alg_type(tfm)) {
109 case CRYPTO_ALG_TYPE_CIPHER: 242 case CRYPTO_ALG_TYPE_CIPHER:
110 return crypto_init_cipher_ops(tfm); 243 return crypto_init_cipher_ops(tfm);
@@ -125,6 +258,14 @@ static int crypto_init_ops(struct crypto_tfm *tfm)
125 258
126static void crypto_exit_ops(struct crypto_tfm *tfm) 259static void crypto_exit_ops(struct crypto_tfm *tfm)
127{ 260{
261 const struct crypto_type *type = tfm->__crt_alg->cra_type;
262
263 if (type) {
264 if (type->exit)
265 type->exit(tfm);
266 return;
267 }
268
128 switch (crypto_tfm_alg_type(tfm)) { 269 switch (crypto_tfm_alg_type(tfm)) {
129 case CRYPTO_ALG_TYPE_CIPHER: 270 case CRYPTO_ALG_TYPE_CIPHER:
130 crypto_exit_cipher_ops(tfm); 271 crypto_exit_cipher_ops(tfm);
@@ -146,53 +287,67 @@ static void crypto_exit_ops(struct crypto_tfm *tfm)
146 287
147static unsigned int crypto_ctxsize(struct crypto_alg *alg, int flags) 288static unsigned int crypto_ctxsize(struct crypto_alg *alg, int flags)
148{ 289{
290 const struct crypto_type *type = alg->cra_type;
149 unsigned int len; 291 unsigned int len;
150 292
293 len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
294 if (type)
295 return len + type->ctxsize(alg);
296
151 switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) { 297 switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
152 default: 298 default:
153 BUG(); 299 BUG();
154 300
155 case CRYPTO_ALG_TYPE_CIPHER: 301 case CRYPTO_ALG_TYPE_CIPHER:
156 len = crypto_cipher_ctxsize(alg, flags); 302 len += crypto_cipher_ctxsize(alg, flags);
157 break; 303 break;
158 304
159 case CRYPTO_ALG_TYPE_DIGEST: 305 case CRYPTO_ALG_TYPE_DIGEST:
160 len = crypto_digest_ctxsize(alg, flags); 306 len += crypto_digest_ctxsize(alg, flags);
161 break; 307 break;
162 308
163 case CRYPTO_ALG_TYPE_COMPRESS: 309 case CRYPTO_ALG_TYPE_COMPRESS:
164 len = crypto_compress_ctxsize(alg, flags); 310 len += crypto_compress_ctxsize(alg, flags);
165 break; 311 break;
166 } 312 }
167 313
168 return len + (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1)); 314 return len;
169} 315}
170 316
171struct crypto_tfm *crypto_alloc_tfm(const char *name, u32 flags) 317void crypto_shoot_alg(struct crypto_alg *alg)
318{
319 down_write(&crypto_alg_sem);
320 alg->cra_flags |= CRYPTO_ALG_DYING;
321 up_write(&crypto_alg_sem);
322}
323EXPORT_SYMBOL_GPL(crypto_shoot_alg);
324
325struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 flags)
172{ 326{
173 struct crypto_tfm *tfm = NULL; 327 struct crypto_tfm *tfm = NULL;
174 struct crypto_alg *alg;
175 unsigned int tfm_size; 328 unsigned int tfm_size;
176 329 int err = -ENOMEM;
177 alg = crypto_alg_mod_lookup(name);
178 if (alg == NULL)
179 goto out;
180 330
181 tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, flags); 331 tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, flags);
182 tfm = kzalloc(tfm_size, GFP_KERNEL); 332 tfm = kzalloc(tfm_size, GFP_KERNEL);
183 if (tfm == NULL) 333 if (tfm == NULL)
184 goto out_put; 334 goto out;
185 335
186 tfm->__crt_alg = alg; 336 tfm->__crt_alg = alg;
187 337
188 if (crypto_init_flags(tfm, flags)) 338 err = crypto_init_flags(tfm, flags);
339 if (err)
189 goto out_free_tfm; 340 goto out_free_tfm;
190 341
191 if (crypto_init_ops(tfm)) 342 err = crypto_init_ops(tfm);
343 if (err)
192 goto out_free_tfm; 344 goto out_free_tfm;
193 345
194 if (alg->cra_init && alg->cra_init(tfm)) 346 if (alg->cra_init && (err = alg->cra_init(tfm))) {
347 if (err == -EAGAIN)
348 crypto_shoot_alg(alg);
195 goto cra_init_failed; 349 goto cra_init_failed;
350 }
196 351
197 goto out; 352 goto out;
198 353
@@ -200,13 +355,97 @@ cra_init_failed:
200 crypto_exit_ops(tfm); 355 crypto_exit_ops(tfm);
201out_free_tfm: 356out_free_tfm:
202 kfree(tfm); 357 kfree(tfm);
203 tfm = NULL; 358 tfm = ERR_PTR(err);
204out_put:
205 crypto_alg_put(alg);
206out: 359out:
207 return tfm; 360 return tfm;
208} 361}
362EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);
363
364struct crypto_tfm *crypto_alloc_tfm(const char *name, u32 flags)
365{
366 struct crypto_tfm *tfm = NULL;
367 int err;
368
369 do {
370 struct crypto_alg *alg;
371
372 alg = crypto_alg_mod_lookup(name, 0, CRYPTO_ALG_ASYNC);
373 err = PTR_ERR(alg);
374 if (IS_ERR(alg))
375 continue;
376
377 tfm = __crypto_alloc_tfm(alg, flags);
378 err = 0;
379 if (IS_ERR(tfm)) {
380 crypto_mod_put(alg);
381 err = PTR_ERR(tfm);
382 tfm = NULL;
383 }
384 } while (err == -EAGAIN && !signal_pending(current));
385
386 return tfm;
387}
388
389/*
390 * crypto_alloc_base - Locate algorithm and allocate transform
391 * @alg_name: Name of algorithm
392 * @type: Type of algorithm
393 * @mask: Mask for type comparison
394 *
395 * crypto_alloc_base() will first attempt to locate an already loaded
396 * algorithm. If that fails and the kernel supports dynamically loadable
397 * modules, it will then attempt to load a module of the same name or
398 * alias. If that fails it will send a query to any loaded crypto manager
399 * to construct an algorithm on the fly. A refcount is grabbed on the
400 * algorithm which is then associated with the new transform.
401 *
402 * The returned transform is of a non-determinate type. Most people
403 * should use one of the more specific allocation functions such as
404 * crypto_alloc_blkcipher.
405 *
406 * In case of error the return value is an error pointer.
407 */
408struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
409{
410 struct crypto_tfm *tfm;
411 int err;
412
413 for (;;) {
414 struct crypto_alg *alg;
415
416 alg = crypto_alg_mod_lookup(alg_name, type, mask);
417 err = PTR_ERR(alg);
418 tfm = ERR_PTR(err);
419 if (IS_ERR(alg))
420 goto err;
421
422 tfm = __crypto_alloc_tfm(alg, 0);
423 if (!IS_ERR(tfm))
424 break;
425
426 crypto_mod_put(alg);
427 err = PTR_ERR(tfm);
209 428
429err:
430 if (err != -EAGAIN)
431 break;
432 if (signal_pending(current)) {
433 err = -EINTR;
434 break;
435 }
436 };
437
438 return tfm;
439}
440EXPORT_SYMBOL_GPL(crypto_alloc_base);
441
442/*
443 * crypto_free_tfm - Free crypto transform
444 * @tfm: Transform to free
445 *
446 * crypto_free_tfm() frees up the transform and any associated resources,
447 * then drops the refcount on the associated algorithm.
448 */
210void crypto_free_tfm(struct crypto_tfm *tfm) 449void crypto_free_tfm(struct crypto_tfm *tfm)
211{ 450{
212 struct crypto_alg *alg; 451 struct crypto_alg *alg;
@@ -221,108 +460,39 @@ void crypto_free_tfm(struct crypto_tfm *tfm)
221 if (alg->cra_exit) 460 if (alg->cra_exit)
222 alg->cra_exit(tfm); 461 alg->cra_exit(tfm);
223 crypto_exit_ops(tfm); 462 crypto_exit_ops(tfm);
224 crypto_alg_put(alg); 463 crypto_mod_put(alg);
225 memset(tfm, 0, size); 464 memset(tfm, 0, size);
226 kfree(tfm); 465 kfree(tfm);
227} 466}
228 467
229static inline int crypto_set_driver_name(struct crypto_alg *alg) 468int crypto_alg_available(const char *name, u32 flags)
230{
231 static const char suffix[] = "-generic";
232 char *driver_name = alg->cra_driver_name;
233 int len;
234
235 if (*driver_name)
236 return 0;
237
238 len = strlcpy(driver_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
239 if (len + sizeof(suffix) > CRYPTO_MAX_ALG_NAME)
240 return -ENAMETOOLONG;
241
242 memcpy(driver_name + len, suffix, sizeof(suffix));
243 return 0;
244}
245
246int crypto_register_alg(struct crypto_alg *alg)
247{ 469{
248 int ret; 470 int ret = 0;
249 struct crypto_alg *q; 471 struct crypto_alg *alg = crypto_alg_mod_lookup(name, 0,
250 472 CRYPTO_ALG_ASYNC);
251 if (alg->cra_alignmask & (alg->cra_alignmask + 1))
252 return -EINVAL;
253
254 if (alg->cra_alignmask & alg->cra_blocksize)
255 return -EINVAL;
256
257 if (alg->cra_blocksize > PAGE_SIZE / 8)
258 return -EINVAL;
259
260 if (alg->cra_priority < 0)
261 return -EINVAL;
262
263 ret = crypto_set_driver_name(alg);
264 if (unlikely(ret))
265 return ret;
266
267 down_write(&crypto_alg_sem);
268 473
269 list_for_each_entry(q, &crypto_alg_list, cra_list) { 474 if (!IS_ERR(alg)) {
270 if (q == alg) { 475 crypto_mod_put(alg);
271 ret = -EEXIST; 476 ret = 1;
272 goto out;
273 }
274 } 477 }
275 478
276 list_add(&alg->cra_list, &crypto_alg_list);
277out:
278 up_write(&crypto_alg_sem);
279 return ret; 479 return ret;
280} 480}
281 481
282int crypto_unregister_alg(struct crypto_alg *alg) 482EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
283{ 483EXPORT_SYMBOL_GPL(crypto_free_tfm);
284 int ret = -ENOENT; 484EXPORT_SYMBOL_GPL(crypto_alg_available);
285 struct crypto_alg *q;
286
287 BUG_ON(!alg->cra_module);
288
289 down_write(&crypto_alg_sem);
290 list_for_each_entry(q, &crypto_alg_list, cra_list) {
291 if (alg == q) {
292 list_del(&alg->cra_list);
293 ret = 0;
294 goto out;
295 }
296 }
297out:
298 up_write(&crypto_alg_sem);
299 return ret;
300}
301 485
302int crypto_alg_available(const char *name, u32 flags) 486int crypto_has_alg(const char *name, u32 type, u32 mask)
303{ 487{
304 int ret = 0; 488 int ret = 0;
305 struct crypto_alg *alg = crypto_alg_mod_lookup(name); 489 struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);
306 490
307 if (alg) { 491 if (!IS_ERR(alg)) {
308 crypto_alg_put(alg); 492 crypto_mod_put(alg);
309 ret = 1; 493 ret = 1;
310 } 494 }
311 495
312 return ret; 496 return ret;
313} 497}
314 498EXPORT_SYMBOL_GPL(crypto_has_alg);
315static int __init init_crypto(void)
316{
317 printk(KERN_INFO "Initializing Cryptographic API\n");
318 crypto_init_proc();
319 return 0;
320}
321
322__initcall(init_crypto);
323
324EXPORT_SYMBOL_GPL(crypto_register_alg);
325EXPORT_SYMBOL_GPL(crypto_unregister_alg);
326EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
327EXPORT_SYMBOL_GPL(crypto_free_tfm);
328EXPORT_SYMBOL_GPL(crypto_alg_available);
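
[Note] Two caller-visible points in the rewritten api.c: crypto_alloc_tfm() keeps its NULL-on-failure contract (now retrying internally on -EAGAIN), whereas the new crypto_alloc_base() hands back ERR_PTR() codes, and crypto_has_alg() is the replacement for crypto_alg_available(). A hedged sketch of the new-style calling convention follows; "aes" is only an example name, and the commented-out middle stands in for whatever cipher operations the caller actually performs.

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/errno.h>

static int example_use_aes(void)
{
	struct crypto_tfm *tfm;

	/* Preferred probe: crypto_has_alg() rather than crypto_alg_available(). */
	if (!crypto_has_alg("aes", 0, CRYPTO_ALG_ASYNC))
		return -ENOENT;

	/* crypto_alloc_base() reports failure via ERR_PTR(), not NULL. */
	tfm = crypto_alloc_base("aes", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* ... set a key and encrypt/decrypt through the cipher ops ... */

	crypto_free_tfm(tfm);
	return 0;
}
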
diff --git a/crypto/arc4.c b/crypto/arc4.c
index 5edc6a65b987..8be47e13a9e3 100644
--- a/crypto/arc4.c
+++ b/crypto/arc4.c
@@ -25,7 +25,7 @@ struct arc4_ctx {
25}; 25};
26 26
27static int arc4_set_key(struct crypto_tfm *tfm, const u8 *in_key, 27static int arc4_set_key(struct crypto_tfm *tfm, const u8 *in_key,
28 unsigned int key_len, u32 *flags) 28 unsigned int key_len)
29{ 29{
30 struct arc4_ctx *ctx = crypto_tfm_ctx(tfm); 30 struct arc4_ctx *ctx = crypto_tfm_ctx(tfm);
31 int i, j = 0, k = 0; 31 int i, j = 0, k = 0;
diff --git a/crypto/blkcipher.c b/crypto/blkcipher.c
new file mode 100644
index 000000000000..034c939bf91a
--- /dev/null
+++ b/crypto/blkcipher.c
@@ -0,0 +1,405 @@
1/*
2 * Block chaining cipher operations.
3 *
4 * Generic encrypt/decrypt wrapper for ciphers, handles operations across
5 * multiple page boundaries by using temporary blocks. In user context,
6 * the kernel is given a chance to schedule us once per page.
7 *
8 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
9 *
10 * This program is free software; you can redistribute it and/or modify it
11 * under the terms of the GNU General Public License as published by the Free
12 * Software Foundation; either version 2 of the License, or (at your option)
13 * any later version.
14 *
15 */
16
17#include <linux/crypto.h>
18#include <linux/errno.h>
19#include <linux/kernel.h>
20#include <linux/io.h>
21#include <linux/module.h>
22#include <linux/scatterlist.h>
23#include <linux/seq_file.h>
24#include <linux/slab.h>
25#include <linux/string.h>
26
27#include "internal.h"
28#include "scatterwalk.h"
29
30enum {
31 BLKCIPHER_WALK_PHYS = 1 << 0,
32 BLKCIPHER_WALK_SLOW = 1 << 1,
33 BLKCIPHER_WALK_COPY = 1 << 2,
34 BLKCIPHER_WALK_DIFF = 1 << 3,
35};
36
37static int blkcipher_walk_next(struct blkcipher_desc *desc,
38 struct blkcipher_walk *walk);
39static int blkcipher_walk_first(struct blkcipher_desc *desc,
40 struct blkcipher_walk *walk);
41
42static inline void blkcipher_map_src(struct blkcipher_walk *walk)
43{
44 walk->src.virt.addr = scatterwalk_map(&walk->in, 0);
45}
46
47static inline void blkcipher_map_dst(struct blkcipher_walk *walk)
48{
49 walk->dst.virt.addr = scatterwalk_map(&walk->out, 1);
50}
51
52static inline void blkcipher_unmap_src(struct blkcipher_walk *walk)
53{
54 scatterwalk_unmap(walk->src.virt.addr, 0);
55}
56
57static inline void blkcipher_unmap_dst(struct blkcipher_walk *walk)
58{
59 scatterwalk_unmap(walk->dst.virt.addr, 1);
60}
61
62static inline u8 *blkcipher_get_spot(u8 *start, unsigned int len)
63{
64 if (offset_in_page(start + len) < len)
65 return (u8 *)((unsigned long)(start + len) & PAGE_MASK);
66 return start;
67}
68
69static inline unsigned int blkcipher_done_slow(struct crypto_blkcipher *tfm,
70 struct blkcipher_walk *walk,
71 unsigned int bsize)
72{
73 u8 *addr;
74 unsigned int alignmask = crypto_blkcipher_alignmask(tfm);
75
76 addr = (u8 *)ALIGN((unsigned long)walk->buffer, alignmask + 1);
77 addr = blkcipher_get_spot(addr, bsize);
78 scatterwalk_copychunks(addr, &walk->out, bsize, 1);
79 return bsize;
80}
81
82static inline unsigned int blkcipher_done_fast(struct blkcipher_walk *walk,
83 unsigned int n)
84{
85 n = walk->nbytes - n;
86
87 if (walk->flags & BLKCIPHER_WALK_COPY) {
88 blkcipher_map_dst(walk);
89 memcpy(walk->dst.virt.addr, walk->page, n);
90 blkcipher_unmap_dst(walk);
91 } else if (!(walk->flags & BLKCIPHER_WALK_PHYS)) {
92 blkcipher_unmap_src(walk);
93 if (walk->flags & BLKCIPHER_WALK_DIFF)
94 blkcipher_unmap_dst(walk);
95 }
96
97 scatterwalk_advance(&walk->in, n);
98 scatterwalk_advance(&walk->out, n);
99
100 return n;
101}
102
103int blkcipher_walk_done(struct blkcipher_desc *desc,
104 struct blkcipher_walk *walk, int err)
105{
106 struct crypto_blkcipher *tfm = desc->tfm;
107 unsigned int nbytes = 0;
108
109 if (likely(err >= 0)) {
110 unsigned int bsize = crypto_blkcipher_blocksize(tfm);
111 unsigned int n;
112
113 if (likely(!(walk->flags & BLKCIPHER_WALK_SLOW)))
114 n = blkcipher_done_fast(walk, err);
115 else
116 n = blkcipher_done_slow(tfm, walk, bsize);
117
118 nbytes = walk->total - n;
119 err = 0;
120 }
121
122 scatterwalk_done(&walk->in, 0, nbytes);
123 scatterwalk_done(&walk->out, 1, nbytes);
124
125 walk->total = nbytes;
126 walk->nbytes = nbytes;
127
128 if (nbytes) {
129 crypto_yield(desc->flags);
130 return blkcipher_walk_next(desc, walk);
131 }
132
133 if (walk->iv != desc->info)
134 memcpy(desc->info, walk->iv, crypto_blkcipher_ivsize(tfm));
135 if (walk->buffer != walk->page)
136 kfree(walk->buffer);
137 if (walk->page)
138 free_page((unsigned long)walk->page);
139
140 return err;
141}
142EXPORT_SYMBOL_GPL(blkcipher_walk_done);
143
144static inline int blkcipher_next_slow(struct blkcipher_desc *desc,
145 struct blkcipher_walk *walk,
146 unsigned int bsize,
147 unsigned int alignmask)
148{
149 unsigned int n;
150
151 if (walk->buffer)
152 goto ok;
153
154 walk->buffer = walk->page;
155 if (walk->buffer)
156 goto ok;
157
158 n = bsize * 2 + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
159 walk->buffer = kmalloc(n, GFP_ATOMIC);
160 if (!walk->buffer)
161 return blkcipher_walk_done(desc, walk, -ENOMEM);
162
163ok:
164 walk->dst.virt.addr = (u8 *)ALIGN((unsigned long)walk->buffer,
165 alignmask + 1);
166 walk->dst.virt.addr = blkcipher_get_spot(walk->dst.virt.addr, bsize);
167 walk->src.virt.addr = blkcipher_get_spot(walk->dst.virt.addr + bsize,
168 bsize);
169
170 scatterwalk_copychunks(walk->src.virt.addr, &walk->in, bsize, 0);
171
172 walk->nbytes = bsize;
173 walk->flags |= BLKCIPHER_WALK_SLOW;
174
175 return 0;
176}
177
178static inline int blkcipher_next_copy(struct blkcipher_walk *walk)
179{
180 u8 *tmp = walk->page;
181
182 blkcipher_map_src(walk);
183 memcpy(tmp, walk->src.virt.addr, walk->nbytes);
184 blkcipher_unmap_src(walk);
185
186 walk->src.virt.addr = tmp;
187 walk->dst.virt.addr = tmp;
188
189 return 0;
190}
191
192static inline int blkcipher_next_fast(struct blkcipher_desc *desc,
193 struct blkcipher_walk *walk)
194{
195 unsigned long diff;
196
197 walk->src.phys.page = scatterwalk_page(&walk->in);
198 walk->src.phys.offset = offset_in_page(walk->in.offset);
199 walk->dst.phys.page = scatterwalk_page(&walk->out);
200 walk->dst.phys.offset = offset_in_page(walk->out.offset);
201
202 if (walk->flags & BLKCIPHER_WALK_PHYS)
203 return 0;
204
205 diff = walk->src.phys.offset - walk->dst.phys.offset;
206 diff |= walk->src.virt.page - walk->dst.virt.page;
207
208 blkcipher_map_src(walk);
209 walk->dst.virt.addr = walk->src.virt.addr;
210
211 if (diff) {
212 walk->flags |= BLKCIPHER_WALK_DIFF;
213 blkcipher_map_dst(walk);
214 }
215
216 return 0;
217}
218
219static int blkcipher_walk_next(struct blkcipher_desc *desc,
220 struct blkcipher_walk *walk)
221{
222 struct crypto_blkcipher *tfm = desc->tfm;
223 unsigned int alignmask = crypto_blkcipher_alignmask(tfm);
224 unsigned int bsize = crypto_blkcipher_blocksize(tfm);
225 unsigned int n;
226 int err;
227
228 n = walk->total;
229 if (unlikely(n < bsize)) {
230 desc->flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
231 return blkcipher_walk_done(desc, walk, -EINVAL);
232 }
233
234 walk->flags &= ~(BLKCIPHER_WALK_SLOW | BLKCIPHER_WALK_COPY |
235 BLKCIPHER_WALK_DIFF);
236 if (!scatterwalk_aligned(&walk->in, alignmask) ||
237 !scatterwalk_aligned(&walk->out, alignmask)) {
238 walk->flags |= BLKCIPHER_WALK_COPY;
239 if (!walk->page) {
240 walk->page = (void *)__get_free_page(GFP_ATOMIC);
241 if (!walk->page)
242 n = 0;
243 }
244 }
245
246 n = scatterwalk_clamp(&walk->in, n);
247 n = scatterwalk_clamp(&walk->out, n);
248
249 if (unlikely(n < bsize)) {
250 err = blkcipher_next_slow(desc, walk, bsize, alignmask);
251 goto set_phys_lowmem;
252 }
253
254 walk->nbytes = n;
255 if (walk->flags & BLKCIPHER_WALK_COPY) {
256 err = blkcipher_next_copy(walk);
257 goto set_phys_lowmem;
258 }
259
260 return blkcipher_next_fast(desc, walk);
261
262set_phys_lowmem:
263 if (walk->flags & BLKCIPHER_WALK_PHYS) {
264 walk->src.phys.page = virt_to_page(walk->src.virt.addr);
265 walk->dst.phys.page = virt_to_page(walk->dst.virt.addr);
266 walk->src.phys.offset &= PAGE_SIZE - 1;
267 walk->dst.phys.offset &= PAGE_SIZE - 1;
268 }
269 return err;
270}
271
272static inline int blkcipher_copy_iv(struct blkcipher_walk *walk,
273 struct crypto_blkcipher *tfm,
274 unsigned int alignmask)
275{
276 unsigned bs = crypto_blkcipher_blocksize(tfm);
277 unsigned int ivsize = crypto_blkcipher_ivsize(tfm);
278 unsigned int size = bs * 2 + ivsize + max(bs, ivsize) - (alignmask + 1);
279 u8 *iv;
280
281 size += alignmask & ~(crypto_tfm_ctx_alignment() - 1);
282 walk->buffer = kmalloc(size, GFP_ATOMIC);
283 if (!walk->buffer)
284 return -ENOMEM;
285
286 iv = (u8 *)ALIGN((unsigned long)walk->buffer, alignmask + 1);
287 iv = blkcipher_get_spot(iv, bs) + bs;
288 iv = blkcipher_get_spot(iv, bs) + bs;
289 iv = blkcipher_get_spot(iv, ivsize);
290
291 walk->iv = memcpy(iv, walk->iv, ivsize);
292 return 0;
293}
294
295int blkcipher_walk_virt(struct blkcipher_desc *desc,
296 struct blkcipher_walk *walk)
297{
298 walk->flags &= ~BLKCIPHER_WALK_PHYS;
299 return blkcipher_walk_first(desc, walk);
300}
301EXPORT_SYMBOL_GPL(blkcipher_walk_virt);
302
303int blkcipher_walk_phys(struct blkcipher_desc *desc,
304 struct blkcipher_walk *walk)
305{
306 walk->flags |= BLKCIPHER_WALK_PHYS;
307 return blkcipher_walk_first(desc, walk);
308}
309EXPORT_SYMBOL_GPL(blkcipher_walk_phys);
310
311static int blkcipher_walk_first(struct blkcipher_desc *desc,
312 struct blkcipher_walk *walk)
313{
314 struct crypto_blkcipher *tfm = desc->tfm;
315 unsigned int alignmask = crypto_blkcipher_alignmask(tfm);
316
317 walk->nbytes = walk->total;
318 if (unlikely(!walk->total))
319 return 0;
320
321 walk->buffer = NULL;
322 walk->iv = desc->info;
323 if (unlikely(((unsigned long)walk->iv & alignmask))) {
324 int err = blkcipher_copy_iv(walk, tfm, alignmask);
325 if (err)
326 return err;
327 }
328
329 scatterwalk_start(&walk->in, walk->in.sg);
330 scatterwalk_start(&walk->out, walk->out.sg);
331 walk->page = NULL;
332
333 return blkcipher_walk_next(desc, walk);
334}
335
336static int setkey(struct crypto_tfm *tfm, const u8 *key,
337 unsigned int keylen)
338{
339 struct blkcipher_alg *cipher = &tfm->__crt_alg->cra_blkcipher;
340
341 if (keylen < cipher->min_keysize || keylen > cipher->max_keysize) {
342 tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
343 return -EINVAL;
344 }
345
346 return cipher->setkey(tfm, key, keylen);
347}
348
349static unsigned int crypto_blkcipher_ctxsize(struct crypto_alg *alg)
350{
351 struct blkcipher_alg *cipher = &alg->cra_blkcipher;
352 unsigned int len = alg->cra_ctxsize;
353
354 if (cipher->ivsize) {
355 len = ALIGN(len, (unsigned long)alg->cra_alignmask + 1);
356 len += cipher->ivsize;
357 }
358
359 return len;
360}
361
362static int crypto_init_blkcipher_ops(struct crypto_tfm *tfm)
363{
364 struct blkcipher_tfm *crt = &tfm->crt_blkcipher;
365 struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;
366 unsigned long align = crypto_tfm_alg_alignmask(tfm) + 1;
367 unsigned long addr;
368
369 if (alg->ivsize > PAGE_SIZE / 8)
370 return -EINVAL;
371
372 crt->setkey = setkey;
373 crt->encrypt = alg->encrypt;
374 crt->decrypt = alg->decrypt;
375
376 addr = (unsigned long)crypto_tfm_ctx(tfm);
377 addr = ALIGN(addr, align);
378 addr += ALIGN(tfm->__crt_alg->cra_ctxsize, align);
379 crt->iv = (void *)addr;
380
381 return 0;
382}
383
384static void crypto_blkcipher_show(struct seq_file *m, struct crypto_alg *alg)
385 __attribute_used__;
386static void crypto_blkcipher_show(struct seq_file *m, struct crypto_alg *alg)
387{
388 seq_printf(m, "type : blkcipher\n");
389 seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
390 seq_printf(m, "min keysize : %u\n", alg->cra_blkcipher.min_keysize);
391 seq_printf(m, "max keysize : %u\n", alg->cra_blkcipher.max_keysize);
392 seq_printf(m, "ivsize : %u\n", alg->cra_blkcipher.ivsize);
393}
394
395const struct crypto_type crypto_blkcipher_type = {
396 .ctxsize = crypto_blkcipher_ctxsize,
397 .init = crypto_init_blkcipher_ops,
398#ifdef CONFIG_PROC_FS
399 .show = crypto_blkcipher_show,
400#endif
401};
402EXPORT_SYMBOL_GPL(crypto_blkcipher_type);
403
404MODULE_LICENSE("GPL");
405MODULE_DESCRIPTION("Generic block chaining cipher type");
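The walk interface exported above is intended to be driven in a simple map/process/advance loop by chaining-mode implementations. Below is a minimal sketch of such a loop, modelled on the crypto_ecb_crypt() helper that appears later in this diff; example_mode_crypt() and the process_block() callback are illustrative placeholders, not kernel symbols.

#include <crypto/algapi.h>

/* Sketch only: drive the blkcipher walk one mapped span at a time. */
static int example_mode_crypt(struct blkcipher_desc *desc,
                              struct scatterlist *dst, struct scatterlist *src,
                              unsigned int nbytes,
                              void (*process_block)(u8 *out, const u8 *in))
{
        struct blkcipher_walk walk;
        unsigned int bsize = crypto_blkcipher_blocksize(desc->tfm);
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        while ((nbytes = walk.nbytes)) {
                u8 *in = walk.src.virt.addr;
                u8 *out = walk.dst.virt.addr;

                do {
                        process_block(out, in);
                        in += bsize;
                        out += bsize;
                } while ((nbytes -= bsize) >= bsize);

                /* Hand back the unprocessed tail; the walk advances or finishes. */
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }

        return err;
}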
diff --git a/crypto/blowfish.c b/crypto/blowfish.c
index 490265f42b3b..55238c4e37f0 100644
--- a/crypto/blowfish.c
+++ b/crypto/blowfish.c
@@ -399,8 +399,7 @@ static void bf_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
399/* 399/*
400 * Calculates the blowfish S and P boxes for encryption and decryption. 400 * Calculates the blowfish S and P boxes for encryption and decryption.
401 */ 401 */
402static int bf_setkey(struct crypto_tfm *tfm, const u8 *key, 402static int bf_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
403 unsigned int keylen, u32 *flags)
404{ 403{
405 struct bf_ctx *ctx = crypto_tfm_ctx(tfm); 404 struct bf_ctx *ctx = crypto_tfm_ctx(tfm);
406 u32 *P = ctx->p; 405 u32 *P = ctx->p;
diff --git a/crypto/cast5.c b/crypto/cast5.c
index 08eef58c1d3d..13ea60abc19a 100644
--- a/crypto/cast5.c
+++ b/crypto/cast5.c
@@ -769,8 +769,7 @@ static void key_schedule(u32 * x, u32 * z, u32 * k)
769} 769}
770 770
771 771
772static int cast5_setkey(struct crypto_tfm *tfm, const u8 *key, 772static int cast5_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned key_len)
773 unsigned key_len, u32 *flags)
774{ 773{
775 struct cast5_ctx *c = crypto_tfm_ctx(tfm); 774 struct cast5_ctx *c = crypto_tfm_ctx(tfm);
776 int i; 775 int i;
@@ -778,11 +777,6 @@ static int cast5_setkey(struct crypto_tfm *tfm, const u8 *key,
778 u32 z[4]; 777 u32 z[4];
779 u32 k[16]; 778 u32 k[16];
780 __be32 p_key[4]; 779 __be32 p_key[4];
781
782 if (key_len < 5 || key_len > 16) {
783 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
784 return -EINVAL;
785 }
786 780
787 c->rr = key_len <= 10 ? 1 : 0; 781 c->rr = key_len <= 10 ? 1 : 0;
788 782
diff --git a/crypto/cast6.c b/crypto/cast6.c
index 08e33bfc3ad1..136ab6dfe8c5 100644
--- a/crypto/cast6.c
+++ b/crypto/cast6.c
@@ -382,14 +382,15 @@ static inline void W(u32 *key, unsigned int i) {
382} 382}
383 383
384static int cast6_setkey(struct crypto_tfm *tfm, const u8 *in_key, 384static int cast6_setkey(struct crypto_tfm *tfm, const u8 *in_key,
385 unsigned key_len, u32 *flags) 385 unsigned key_len)
386{ 386{
387 int i; 387 int i;
388 u32 key[8]; 388 u32 key[8];
389 __be32 p_key[8]; /* padded key */ 389 __be32 p_key[8]; /* padded key */
390 struct cast6_ctx *c = crypto_tfm_ctx(tfm); 390 struct cast6_ctx *c = crypto_tfm_ctx(tfm);
391 u32 *flags = &tfm->crt_flags;
391 392
392 if (key_len < 16 || key_len > 32 || key_len % 4 != 0) { 393 if (key_len % 4 != 0) {
393 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; 394 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
394 return -EINVAL; 395 return -EINVAL;
395 } 396 }
diff --git a/crypto/cbc.c b/crypto/cbc.c
new file mode 100644
index 000000000000..f5542b4db387
--- /dev/null
+++ b/crypto/cbc.c
@@ -0,0 +1,344 @@
1/*
2 * CBC: Cipher Block Chaining mode
3 *
4 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License as published by the Free
8 * Software Foundation; either version 2 of the License, or (at your option)
9 * any later version.
10 *
11 */
12
13#include <crypto/algapi.h>
14#include <linux/err.h>
15#include <linux/init.h>
16#include <linux/kernel.h>
17#include <linux/module.h>
18#include <linux/scatterlist.h>
19#include <linux/slab.h>
20
21struct crypto_cbc_ctx {
22 struct crypto_cipher *child;
23 void (*xor)(u8 *dst, const u8 *src, unsigned int bs);
24};
25
26static int crypto_cbc_setkey(struct crypto_tfm *parent, const u8 *key,
27 unsigned int keylen)
28{
29 struct crypto_cbc_ctx *ctx = crypto_tfm_ctx(parent);
30 struct crypto_cipher *child = ctx->child;
31 int err;
32
33 crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
34 crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
35 CRYPTO_TFM_REQ_MASK);
36 err = crypto_cipher_setkey(child, key, keylen);
37 crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
38 CRYPTO_TFM_RES_MASK);
39 return err;
40}
41
42static int crypto_cbc_encrypt_segment(struct blkcipher_desc *desc,
43 struct blkcipher_walk *walk,
44 struct crypto_cipher *tfm,
45 void (*xor)(u8 *, const u8 *,
46 unsigned int))
47{
48 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
49 crypto_cipher_alg(tfm)->cia_encrypt;
50 int bsize = crypto_cipher_blocksize(tfm);
51 unsigned int nbytes = walk->nbytes;
52 u8 *src = walk->src.virt.addr;
53 u8 *dst = walk->dst.virt.addr;
54 u8 *iv = walk->iv;
55
56 do {
57 xor(iv, src, bsize);
58 fn(crypto_cipher_tfm(tfm), dst, iv);
59 memcpy(iv, dst, bsize);
60
61 src += bsize;
62 dst += bsize;
63 } while ((nbytes -= bsize) >= bsize);
64
65 return nbytes;
66}
67
68static int crypto_cbc_encrypt_inplace(struct blkcipher_desc *desc,
69 struct blkcipher_walk *walk,
70 struct crypto_cipher *tfm,
71 void (*xor)(u8 *, const u8 *,
72 unsigned int))
73{
74 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
75 crypto_cipher_alg(tfm)->cia_encrypt;
76 int bsize = crypto_cipher_blocksize(tfm);
77 unsigned int nbytes = walk->nbytes;
78 u8 *src = walk->src.virt.addr;
79 u8 *iv = walk->iv;
80
81 do {
82 xor(src, iv, bsize);
83 fn(crypto_cipher_tfm(tfm), src, src);
84 iv = src;
85
86 src += bsize;
87 } while ((nbytes -= bsize) >= bsize);
88
89 memcpy(walk->iv, iv, bsize);
90
91 return nbytes;
92}
93
94static int crypto_cbc_encrypt(struct blkcipher_desc *desc,
95 struct scatterlist *dst, struct scatterlist *src,
96 unsigned int nbytes)
97{
98 struct blkcipher_walk walk;
99 struct crypto_blkcipher *tfm = desc->tfm;
100 struct crypto_cbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
101 struct crypto_cipher *child = ctx->child;
102 void (*xor)(u8 *, const u8 *, unsigned int bs) = ctx->xor;
103 int err;
104
105 blkcipher_walk_init(&walk, dst, src, nbytes);
106 err = blkcipher_walk_virt(desc, &walk);
107
108 while ((nbytes = walk.nbytes)) {
109 if (walk.src.virt.addr == walk.dst.virt.addr)
110 nbytes = crypto_cbc_encrypt_inplace(desc, &walk, child,
111 xor);
112 else
113 nbytes = crypto_cbc_encrypt_segment(desc, &walk, child,
114 xor);
115 err = blkcipher_walk_done(desc, &walk, nbytes);
116 }
117
118 return err;
119}
120
121static int crypto_cbc_decrypt_segment(struct blkcipher_desc *desc,
122 struct blkcipher_walk *walk,
123 struct crypto_cipher *tfm,
124 void (*xor)(u8 *, const u8 *,
125 unsigned int))
126{
127 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
128 crypto_cipher_alg(tfm)->cia_decrypt;
129 int bsize = crypto_cipher_blocksize(tfm);
130 unsigned int nbytes = walk->nbytes;
131 u8 *src = walk->src.virt.addr;
132 u8 *dst = walk->dst.virt.addr;
133 u8 *iv = walk->iv;
134
135 do {
136 fn(crypto_cipher_tfm(tfm), dst, src);
137 xor(dst, iv, bsize);
138 iv = src;
139
140 src += bsize;
141 dst += bsize;
142 } while ((nbytes -= bsize) >= bsize);
143
144 memcpy(walk->iv, iv, bsize);
145
146 return nbytes;
147}
148
149static int crypto_cbc_decrypt_inplace(struct blkcipher_desc *desc,
150 struct blkcipher_walk *walk,
151 struct crypto_cipher *tfm,
152 void (*xor)(u8 *, const u8 *,
153 unsigned int))
154{
155 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
156 crypto_cipher_alg(tfm)->cia_decrypt;
157 int bsize = crypto_cipher_blocksize(tfm);
158 unsigned long alignmask = crypto_cipher_alignmask(tfm);
159 unsigned int nbytes = walk->nbytes;
160 u8 *src = walk->src.virt.addr;
161 u8 stack[bsize + alignmask];
162 u8 *first_iv = (u8 *)ALIGN((unsigned long)stack, alignmask + 1);
163
164 memcpy(first_iv, walk->iv, bsize);
165
166 /* Start of the last block. */
167 src += nbytes - nbytes % bsize - bsize;
168 memcpy(walk->iv, src, bsize);
169
170 for (;;) {
171 fn(crypto_cipher_tfm(tfm), src, src);
172 if ((nbytes -= bsize) < bsize)
173 break;
174 xor(src, src - bsize, bsize);
175 src -= bsize;
176 }
177
178 xor(src, first_iv, bsize);
179
180 return nbytes;
181}
182
183static int crypto_cbc_decrypt(struct blkcipher_desc *desc,
184 struct scatterlist *dst, struct scatterlist *src,
185 unsigned int nbytes)
186{
187 struct blkcipher_walk walk;
188 struct crypto_blkcipher *tfm = desc->tfm;
189 struct crypto_cbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
190 struct crypto_cipher *child = ctx->child;
191 void (*xor)(u8 *, const u8 *, unsigned int bs) = ctx->xor;
192 int err;
193
194 blkcipher_walk_init(&walk, dst, src, nbytes);
195 err = blkcipher_walk_virt(desc, &walk);
196
197 while ((nbytes = walk.nbytes)) {
198 if (walk.src.virt.addr == walk.dst.virt.addr)
199 nbytes = crypto_cbc_decrypt_inplace(desc, &walk, child,
200 xor);
201 else
202 nbytes = crypto_cbc_decrypt_segment(desc, &walk, child,
203 xor);
204 err = blkcipher_walk_done(desc, &walk, nbytes);
205 }
206
207 return err;
208}
209
210static void xor_byte(u8 *a, const u8 *b, unsigned int bs)
211{
212 do {
213 *a++ ^= *b++;
214 } while (--bs);
215}
216
217static void xor_quad(u8 *dst, const u8 *src, unsigned int bs)
218{
219 u32 *a = (u32 *)dst;
220 u32 *b = (u32 *)src;
221
222 do {
223 *a++ ^= *b++;
224 } while ((bs -= 4));
225}
226
227static void xor_64(u8 *a, const u8 *b, unsigned int bs)
228{
229 ((u32 *)a)[0] ^= ((u32 *)b)[0];
230 ((u32 *)a)[1] ^= ((u32 *)b)[1];
231}
232
233static void xor_128(u8 *a, const u8 *b, unsigned int bs)
234{
235 ((u32 *)a)[0] ^= ((u32 *)b)[0];
236 ((u32 *)a)[1] ^= ((u32 *)b)[1];
237 ((u32 *)a)[2] ^= ((u32 *)b)[2];
238 ((u32 *)a)[3] ^= ((u32 *)b)[3];
239}
240
241static int crypto_cbc_init_tfm(struct crypto_tfm *tfm)
242{
243 struct crypto_instance *inst = (void *)tfm->__crt_alg;
244 struct crypto_spawn *spawn = crypto_instance_ctx(inst);
245 struct crypto_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
246
247 switch (crypto_tfm_alg_blocksize(tfm)) {
248 case 8:
249 ctx->xor = xor_64;
250 break;
251
252 case 16:
253 ctx->xor = xor_128;
254 break;
255
256 default:
257 if (crypto_tfm_alg_blocksize(tfm) % 4)
258 ctx->xor = xor_byte;
259 else
260 ctx->xor = xor_quad;
261 }
262
263 tfm = crypto_spawn_tfm(spawn);
264 if (IS_ERR(tfm))
265 return PTR_ERR(tfm);
266
267 ctx->child = crypto_cipher_cast(tfm);
268 return 0;
269}
270
271static void crypto_cbc_exit_tfm(struct crypto_tfm *tfm)
272{
273 struct crypto_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
274 crypto_free_cipher(ctx->child);
275}
276
277static struct crypto_instance *crypto_cbc_alloc(void *param, unsigned int len)
278{
279 struct crypto_instance *inst;
280 struct crypto_alg *alg;
281
282 alg = crypto_get_attr_alg(param, len, CRYPTO_ALG_TYPE_CIPHER,
283 CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
284 if (IS_ERR(alg))
285 return ERR_PTR(PTR_ERR(alg));
286
287 inst = crypto_alloc_instance("cbc", alg);
288 if (IS_ERR(inst))
289 goto out_put_alg;
290
291 inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
292 inst->alg.cra_priority = alg->cra_priority;
293 inst->alg.cra_blocksize = alg->cra_blocksize;
294 inst->alg.cra_alignmask = alg->cra_alignmask;
295 inst->alg.cra_type = &crypto_blkcipher_type;
296
297 if (!(alg->cra_blocksize % 4))
298 inst->alg.cra_alignmask |= 3;
299 inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
300 inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
301 inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;
302
303 inst->alg.cra_ctxsize = sizeof(struct crypto_cbc_ctx);
304
305 inst->alg.cra_init = crypto_cbc_init_tfm;
306 inst->alg.cra_exit = crypto_cbc_exit_tfm;
307
308 inst->alg.cra_blkcipher.setkey = crypto_cbc_setkey;
309 inst->alg.cra_blkcipher.encrypt = crypto_cbc_encrypt;
310 inst->alg.cra_blkcipher.decrypt = crypto_cbc_decrypt;
311
312out_put_alg:
313 crypto_mod_put(alg);
314 return inst;
315}
316
317static void crypto_cbc_free(struct crypto_instance *inst)
318{
319 crypto_drop_spawn(crypto_instance_ctx(inst));
320 kfree(inst);
321}
322
323static struct crypto_template crypto_cbc_tmpl = {
324 .name = "cbc",
325 .alloc = crypto_cbc_alloc,
326 .free = crypto_cbc_free,
327 .module = THIS_MODULE,
328};
329
330static int __init crypto_cbc_module_init(void)
331{
332 return crypto_register_template(&crypto_cbc_tmpl);
333}
334
335static void __exit crypto_cbc_module_exit(void)
336{
337 crypto_unregister_template(&crypto_cbc_tmpl);
338}
339
340module_init(crypto_cbc_module_init);
341module_exit(crypto_cbc_module_exit);
342
343MODULE_LICENSE("GPL");
344MODULE_DESCRIPTION("CBC block cipher algorithm");
diff --git a/crypto/cipher.c b/crypto/cipher.c
index b899eb97abd7..9e03701cfdcc 100644
--- a/crypto/cipher.c
+++ b/crypto/cipher.c
@@ -23,6 +23,28 @@
23#include "internal.h" 23#include "internal.h"
24#include "scatterwalk.h" 24#include "scatterwalk.h"
25 25
26struct cipher_alg_compat {
27 unsigned int cia_min_keysize;
28 unsigned int cia_max_keysize;
29 int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
30 unsigned int keylen);
31 void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
32 void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
33
34 unsigned int (*cia_encrypt_ecb)(const struct cipher_desc *desc,
35 u8 *dst, const u8 *src,
36 unsigned int nbytes);
37 unsigned int (*cia_decrypt_ecb)(const struct cipher_desc *desc,
38 u8 *dst, const u8 *src,
39 unsigned int nbytes);
40 unsigned int (*cia_encrypt_cbc)(const struct cipher_desc *desc,
41 u8 *dst, const u8 *src,
42 unsigned int nbytes);
43 unsigned int (*cia_decrypt_cbc)(const struct cipher_desc *desc,
44 u8 *dst, const u8 *src,
45 unsigned int nbytes);
46};
47
26static inline void xor_64(u8 *a, const u8 *b) 48static inline void xor_64(u8 *a, const u8 *b)
27{ 49{
28 ((u32 *)a)[0] ^= ((u32 *)b)[0]; 50 ((u32 *)a)[0] ^= ((u32 *)b)[0];
@@ -45,15 +67,10 @@ static unsigned int crypt_slow(const struct cipher_desc *desc,
45 u8 buffer[bsize * 2 + alignmask]; 67 u8 buffer[bsize * 2 + alignmask];
46 u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); 68 u8 *src = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
47 u8 *dst = src + bsize; 69 u8 *dst = src + bsize;
48 unsigned int n;
49
50 n = scatterwalk_copychunks(src, in, bsize, 0);
51 scatterwalk_advance(in, n);
52 70
71 scatterwalk_copychunks(src, in, bsize, 0);
53 desc->prfn(desc, dst, src, bsize); 72 desc->prfn(desc, dst, src, bsize);
54 73 scatterwalk_copychunks(dst, out, bsize, 1);
55 n = scatterwalk_copychunks(dst, out, bsize, 1);
56 scatterwalk_advance(out, n);
57 74
58 return bsize; 75 return bsize;
59} 76}
@@ -64,12 +81,16 @@ static inline unsigned int crypt_fast(const struct cipher_desc *desc,
64 unsigned int nbytes, u8 *tmp) 81 unsigned int nbytes, u8 *tmp)
65{ 82{
66 u8 *src, *dst; 83 u8 *src, *dst;
84 u8 *real_src, *real_dst;
85
86 real_src = scatterwalk_map(in, 0);
87 real_dst = scatterwalk_map(out, 1);
67 88
68 src = in->data; 89 src = real_src;
69 dst = scatterwalk_samebuf(in, out) ? src : out->data; 90 dst = scatterwalk_samebuf(in, out) ? src : real_dst;
70 91
71 if (tmp) { 92 if (tmp) {
72 memcpy(tmp, in->data, nbytes); 93 memcpy(tmp, src, nbytes);
73 src = tmp; 94 src = tmp;
74 dst = tmp; 95 dst = tmp;
75 } 96 }
@@ -77,7 +98,10 @@ static inline unsigned int crypt_fast(const struct cipher_desc *desc,
77 nbytes = desc->prfn(desc, dst, src, nbytes); 98 nbytes = desc->prfn(desc, dst, src, nbytes);
78 99
79 if (tmp) 100 if (tmp)
80 memcpy(out->data, tmp, nbytes); 101 memcpy(real_dst, tmp, nbytes);
102
103 scatterwalk_unmap(real_src, 0);
104 scatterwalk_unmap(real_dst, 1);
81 105
82 scatterwalk_advance(in, nbytes); 106 scatterwalk_advance(in, nbytes);
83 scatterwalk_advance(out, nbytes); 107 scatterwalk_advance(out, nbytes);
@@ -126,9 +150,6 @@ static int crypt(const struct cipher_desc *desc,
126 tmp = (u8 *)buffer; 150 tmp = (u8 *)buffer;
127 } 151 }
128 152
129 scatterwalk_map(&walk_in, 0);
130 scatterwalk_map(&walk_out, 1);
131
132 n = scatterwalk_clamp(&walk_in, n); 153 n = scatterwalk_clamp(&walk_in, n);
133 n = scatterwalk_clamp(&walk_out, n); 154 n = scatterwalk_clamp(&walk_out, n);
134 155
@@ -145,7 +166,7 @@ static int crypt(const struct cipher_desc *desc,
145 if (!nbytes) 166 if (!nbytes)
146 break; 167 break;
147 168
148 crypto_yield(tfm); 169 crypto_yield(tfm->crt_flags);
149 } 170 }
150 171
151 if (buffer) 172 if (buffer)
@@ -264,12 +285,12 @@ static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
264{ 285{
265 struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher; 286 struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;
266 287
288 tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
267 if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) { 289 if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) {
268 tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; 290 tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
269 return -EINVAL; 291 return -EINVAL;
270 } else 292 } else
271 return cia->cia_setkey(tfm, key, keylen, 293 return cia->cia_setkey(tfm, key, keylen);
272 &tfm->crt_flags);
273} 294}
274 295
275static int ecb_encrypt(struct crypto_tfm *tfm, 296static int ecb_encrypt(struct crypto_tfm *tfm,
@@ -277,7 +298,7 @@ static int ecb_encrypt(struct crypto_tfm *tfm,
277 struct scatterlist *src, unsigned int nbytes) 298 struct scatterlist *src, unsigned int nbytes)
278{ 299{
279 struct cipher_desc desc; 300 struct cipher_desc desc;
280 struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher; 301 struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;
281 302
282 desc.tfm = tfm; 303 desc.tfm = tfm;
283 desc.crfn = cipher->cia_encrypt; 304 desc.crfn = cipher->cia_encrypt;
@@ -292,7 +313,7 @@ static int ecb_decrypt(struct crypto_tfm *tfm,
292 unsigned int nbytes) 313 unsigned int nbytes)
293{ 314{
294 struct cipher_desc desc; 315 struct cipher_desc desc;
295 struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher; 316 struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;
296 317
297 desc.tfm = tfm; 318 desc.tfm = tfm;
298 desc.crfn = cipher->cia_decrypt; 319 desc.crfn = cipher->cia_decrypt;
@@ -307,7 +328,7 @@ static int cbc_encrypt(struct crypto_tfm *tfm,
307 unsigned int nbytes) 328 unsigned int nbytes)
308{ 329{
309 struct cipher_desc desc; 330 struct cipher_desc desc;
310 struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher; 331 struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;
311 332
312 desc.tfm = tfm; 333 desc.tfm = tfm;
313 desc.crfn = cipher->cia_encrypt; 334 desc.crfn = cipher->cia_encrypt;
@@ -323,7 +344,7 @@ static int cbc_encrypt_iv(struct crypto_tfm *tfm,
323 unsigned int nbytes, u8 *iv) 344 unsigned int nbytes, u8 *iv)
324{ 345{
325 struct cipher_desc desc; 346 struct cipher_desc desc;
326 struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher; 347 struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;
327 348
328 desc.tfm = tfm; 349 desc.tfm = tfm;
329 desc.crfn = cipher->cia_encrypt; 350 desc.crfn = cipher->cia_encrypt;
@@ -339,7 +360,7 @@ static int cbc_decrypt(struct crypto_tfm *tfm,
339 unsigned int nbytes) 360 unsigned int nbytes)
340{ 361{
341 struct cipher_desc desc; 362 struct cipher_desc desc;
342 struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher; 363 struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;
343 364
344 desc.tfm = tfm; 365 desc.tfm = tfm;
345 desc.crfn = cipher->cia_decrypt; 366 desc.crfn = cipher->cia_decrypt;
@@ -355,7 +376,7 @@ static int cbc_decrypt_iv(struct crypto_tfm *tfm,
355 unsigned int nbytes, u8 *iv) 376 unsigned int nbytes, u8 *iv)
356{ 377{
357 struct cipher_desc desc; 378 struct cipher_desc desc;
358 struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher; 379 struct cipher_alg_compat *cipher = (void *)&tfm->__crt_alg->cra_cipher;
359 380
360 desc.tfm = tfm; 381 desc.tfm = tfm;
361 desc.crfn = cipher->cia_decrypt; 382 desc.crfn = cipher->cia_decrypt;
@@ -388,17 +409,67 @@ int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags)
388 return 0; 409 return 0;
389} 410}
390 411
412static void cipher_crypt_unaligned(void (*fn)(struct crypto_tfm *, u8 *,
413 const u8 *),
414 struct crypto_tfm *tfm,
415 u8 *dst, const u8 *src)
416{
417 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
418 unsigned int size = crypto_tfm_alg_blocksize(tfm);
419 u8 buffer[size + alignmask];
420 u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
421
422 memcpy(tmp, src, size);
423 fn(tfm, tmp, tmp);
424 memcpy(dst, tmp, size);
425}
426
427static void cipher_encrypt_unaligned(struct crypto_tfm *tfm,
428 u8 *dst, const u8 *src)
429{
430 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
431 struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
432
433 if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
434 cipher_crypt_unaligned(cipher->cia_encrypt, tfm, dst, src);
435 return;
436 }
437
438 cipher->cia_encrypt(tfm, dst, src);
439}
440
441static void cipher_decrypt_unaligned(struct crypto_tfm *tfm,
442 u8 *dst, const u8 *src)
443{
444 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
445 struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
446
447 if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
448 cipher_crypt_unaligned(cipher->cia_decrypt, tfm, dst, src);
449 return;
450 }
451
452 cipher->cia_decrypt(tfm, dst, src);
453}
454
391int crypto_init_cipher_ops(struct crypto_tfm *tfm) 455int crypto_init_cipher_ops(struct crypto_tfm *tfm)
392{ 456{
393 int ret = 0; 457 int ret = 0;
394 struct cipher_tfm *ops = &tfm->crt_cipher; 458 struct cipher_tfm *ops = &tfm->crt_cipher;
459 struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
395 460
396 ops->cit_setkey = setkey; 461 ops->cit_setkey = setkey;
462 ops->cit_encrypt_one = crypto_tfm_alg_alignmask(tfm) ?
463 cipher_encrypt_unaligned : cipher->cia_encrypt;
464 ops->cit_decrypt_one = crypto_tfm_alg_alignmask(tfm) ?
465 cipher_decrypt_unaligned : cipher->cia_decrypt;
397 466
398 switch (tfm->crt_cipher.cit_mode) { 467 switch (tfm->crt_cipher.cit_mode) {
399 case CRYPTO_TFM_MODE_ECB: 468 case CRYPTO_TFM_MODE_ECB:
400 ops->cit_encrypt = ecb_encrypt; 469 ops->cit_encrypt = ecb_encrypt;
401 ops->cit_decrypt = ecb_decrypt; 470 ops->cit_decrypt = ecb_decrypt;
471 ops->cit_encrypt_iv = nocrypt_iv;
472 ops->cit_decrypt_iv = nocrypt_iv;
402 break; 473 break;
403 474
404 case CRYPTO_TFM_MODE_CBC: 475 case CRYPTO_TFM_MODE_CBC:
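cipher_crypt_unaligned() above handles callers whose buffers do not meet the algorithm's alignmask by bouncing a single block through a suitably aligned stack buffer. The standalone sketch below reproduces that idiom with ALIGN() re-implemented locally as a power-of-two round-up and a dummy per-block function standing in for cia_encrypt/cia_decrypt; all names and sizes here are illustrative.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define ALIGN_UP(x, a)  (((x) + (a) - 1) & ~((uintptr_t)(a) - 1))
#define BSIZE           16
#define MASK            7       /* example alignmask: 8-byte alignment required */

static void dummy_block(unsigned char *dst, const unsigned char *src)
{
        for (int i = 0; i < BSIZE; i++)
                dst[i] = src[i] + 1;    /* stand-in for cia_encrypt() */
}

static void crypt_one_block(unsigned char *dst, const unsigned char *src)
{
        if (((uintptr_t)dst | (uintptr_t)src) & MASK) {
                /* Slow path: copy through a buffer rounded up to the alignment. */
                unsigned char buffer[BSIZE + MASK];
                unsigned char *tmp =
                        (unsigned char *)ALIGN_UP((uintptr_t)buffer, MASK + 1);

                memcpy(tmp, src, BSIZE);
                dummy_block(tmp, tmp);
                memcpy(dst, tmp, BSIZE);
                return;
        }
        dummy_block(dst, src);          /* fast path: already aligned */
}

int main(void)
{
        unsigned char in[BSIZE + 1], out[BSIZE + 1];

        memset(in, 0x41, sizeof(in));
        crypt_one_block(out + 1, in + 1);       /* deliberately misaligned */
        printf("first output byte: 0x%02x\n", out[1]);  /* 0x42 */
        return 0;
}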
diff --git a/crypto/crc32c.c b/crypto/crc32c.c
index f2660123aeb4..0fa744392a4c 100644
--- a/crypto/crc32c.c
+++ b/crypto/crc32c.c
@@ -16,14 +16,14 @@
16#include <linux/string.h> 16#include <linux/string.h>
17#include <linux/crypto.h> 17#include <linux/crypto.h>
18#include <linux/crc32c.h> 18#include <linux/crc32c.h>
19#include <linux/types.h> 19#include <linux/kernel.h>
20#include <asm/byteorder.h>
21 20
22#define CHKSUM_BLOCK_SIZE 32 21#define CHKSUM_BLOCK_SIZE 32
23#define CHKSUM_DIGEST_SIZE 4 22#define CHKSUM_DIGEST_SIZE 4
24 23
25struct chksum_ctx { 24struct chksum_ctx {
26 u32 crc; 25 u32 crc;
26 u32 key;
27}; 27};
28 28
29/* 29/*
@@ -35,7 +35,7 @@ static void chksum_init(struct crypto_tfm *tfm)
35{ 35{
36 struct chksum_ctx *mctx = crypto_tfm_ctx(tfm); 36 struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
37 37
38 mctx->crc = ~(u32)0; /* common usage */ 38 mctx->crc = mctx->key;
39} 39}
40 40
41/* 41/*
@@ -44,16 +44,15 @@ static void chksum_init(struct crypto_tfm *tfm)
44 * the seed. 44 * the seed.
45 */ 45 */
46static int chksum_setkey(struct crypto_tfm *tfm, const u8 *key, 46static int chksum_setkey(struct crypto_tfm *tfm, const u8 *key,
47 unsigned int keylen, u32 *flags) 47 unsigned int keylen)
48{ 48{
49 struct chksum_ctx *mctx = crypto_tfm_ctx(tfm); 49 struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
50 50
51 if (keylen != sizeof(mctx->crc)) { 51 if (keylen != sizeof(mctx->crc)) {
52 if (flags) 52 tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
53 *flags = CRYPTO_TFM_RES_BAD_KEY_LEN;
54 return -EINVAL; 53 return -EINVAL;
55 } 54 }
56 mctx->crc = __cpu_to_le32(*(u32 *)key); 55 mctx->key = le32_to_cpu(*(__le32 *)key);
57 return 0; 56 return 0;
58} 57}
59 58
@@ -61,19 +60,23 @@ static void chksum_update(struct crypto_tfm *tfm, const u8 *data,
61 unsigned int length) 60 unsigned int length)
62{ 61{
63 struct chksum_ctx *mctx = crypto_tfm_ctx(tfm); 62 struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
64 u32 mcrc;
65 63
66 mcrc = crc32c(mctx->crc, data, (size_t)length); 64 mctx->crc = crc32c(mctx->crc, data, length);
67
68 mctx->crc = mcrc;
69} 65}
70 66
71static void chksum_final(struct crypto_tfm *tfm, u8 *out) 67static void chksum_final(struct crypto_tfm *tfm, u8 *out)
72{ 68{
73 struct chksum_ctx *mctx = crypto_tfm_ctx(tfm); 69 struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
74 u32 mcrc = (mctx->crc ^ ~(u32)0);
75 70
76 *(u32 *)out = __le32_to_cpu(mcrc); 71 *(__le32 *)out = ~cpu_to_le32(mctx->crc);
72}
73
74static int crc32c_cra_init(struct crypto_tfm *tfm)
75{
76 struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
77
78 mctx->key = ~0;
79 return 0;
77} 80}
78 81
79static struct crypto_alg alg = { 82static struct crypto_alg alg = {
@@ -83,6 +86,7 @@ static struct crypto_alg alg = {
83 .cra_ctxsize = sizeof(struct chksum_ctx), 86 .cra_ctxsize = sizeof(struct chksum_ctx),
84 .cra_module = THIS_MODULE, 87 .cra_module = THIS_MODULE,
85 .cra_list = LIST_HEAD_INIT(alg.cra_list), 88 .cra_list = LIST_HEAD_INIT(alg.cra_list),
89 .cra_init = crc32c_cra_init,
86 .cra_u = { 90 .cra_u = {
87 .digest = { 91 .digest = {
88 .dia_digestsize= CHKSUM_DIGEST_SIZE, 92 .dia_digestsize= CHKSUM_DIGEST_SIZE,
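The conversion above keeps the usual CRC32C convention: the accumulator starts from a seed (~0 by default, or whatever chksum_setkey() stored), updates may be split across calls, and the final step inverts the accumulator. The standalone program below demonstrates that convention with a bit-at-a-time reference CRC32C (reflected Castagnoli polynomial 0x82F63B78); it is only an example and not the kernel's crc32c() implementation.

#include <stdint.h>
#include <stdio.h>

/* Bit-at-a-time CRC32C (reflected polynomial), reference implementation only. */
static uint32_t crc32c_bits(uint32_t crc, const unsigned char *p, size_t len)
{
        while (len--) {
                crc ^= *p++;
                for (int i = 0; i < 8; i++)
                        crc = (crc >> 1) ^ (0x82F63B78u & -(crc & 1));
        }
        return crc;
}

int main(void)
{
        const unsigned char msg[] = "123456789";
        uint32_t one_shot, chunked;

        /* One-shot: seed with ~0, invert at the end (what init/final do above).
         * Standard check value for "123456789" is 0xe3069283. */
        one_shot = ~crc32c_bits(~0u, msg, 9);

        /* Chunked: the value carried between updates is the raw accumulator,
         * exactly what chksum_update() keeps in mctx->crc. */
        chunked = crc32c_bits(~0u, msg, 4);
        chunked = ~crc32c_bits(chunked, msg + 4, 5);

        printf("one-shot %08x, chunked %08x, %s\n", one_shot, chunked,
               one_shot == chunked ? "equal" : "DIFFERENT");
        return 0;
}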
diff --git a/crypto/crypto_null.c b/crypto/crypto_null.c
index a0d956b52949..24dbb5d8617e 100644
--- a/crypto/crypto_null.c
+++ b/crypto/crypto_null.c
@@ -48,7 +48,7 @@ static void null_final(struct crypto_tfm *tfm, u8 *out)
48{ } 48{ }
49 49
50static int null_setkey(struct crypto_tfm *tfm, const u8 *key, 50static int null_setkey(struct crypto_tfm *tfm, const u8 *key,
51 unsigned int keylen, u32 *flags) 51 unsigned int keylen)
52{ return 0; } 52{ return 0; }
53 53
54static void null_crypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) 54static void null_crypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
diff --git a/crypto/cryptomgr.c b/crypto/cryptomgr.c
new file mode 100644
index 000000000000..9b5b15601068
--- /dev/null
+++ b/crypto/cryptomgr.c
@@ -0,0 +1,156 @@
1/*
2 * Create default crypto algorithm instances.
3 *
4 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License as published by the Free
8 * Software Foundation; either version 2 of the License, or (at your option)
9 * any later version.
10 *
11 */
12
13#include <linux/crypto.h>
14#include <linux/ctype.h>
15#include <linux/err.h>
16#include <linux/init.h>
17#include <linux/module.h>
18#include <linux/notifier.h>
19#include <linux/rtnetlink.h>
20#include <linux/sched.h>
21#include <linux/string.h>
22#include <linux/workqueue.h>
23
24#include "internal.h"
25
26struct cryptomgr_param {
27 struct work_struct work;
28
29 struct {
30 struct rtattr attr;
31 struct crypto_attr_alg data;
32 } alg;
33
34 struct {
35 u32 type;
36 u32 mask;
37 char name[CRYPTO_MAX_ALG_NAME];
38 } larval;
39
40 char template[CRYPTO_MAX_ALG_NAME];
41};
42
43static void cryptomgr_probe(void *data)
44{
45 struct cryptomgr_param *param = data;
46 struct crypto_template *tmpl;
47 struct crypto_instance *inst;
48 int err;
49
50 tmpl = crypto_lookup_template(param->template);
51 if (!tmpl)
52 goto err;
53
54 do {
55 inst = tmpl->alloc(&param->alg, sizeof(param->alg));
56 if (IS_ERR(inst))
57 err = PTR_ERR(inst);
58 else if ((err = crypto_register_instance(tmpl, inst)))
59 tmpl->free(inst);
60 } while (err == -EAGAIN && !signal_pending(current));
61
62 crypto_tmpl_put(tmpl);
63
64 if (err)
65 goto err;
66
67out:
68 kfree(param);
69 return;
70
71err:
72 crypto_larval_error(param->larval.name, param->larval.type,
73 param->larval.mask);
74 goto out;
75}
76
77static int cryptomgr_schedule_probe(struct crypto_larval *larval)
78{
79 struct cryptomgr_param *param;
80 const char *name = larval->alg.cra_name;
81 const char *p;
82 unsigned int len;
83
84 param = kmalloc(sizeof(*param), GFP_KERNEL);
85 if (!param)
86 goto err;
87
88 for (p = name; isalnum(*p) || *p == '-' || *p == '_'; p++)
89 ;
90
91 len = p - name;
92 if (!len || *p != '(')
93 goto err_free_param;
94
95 memcpy(param->template, name, len);
96 param->template[len] = 0;
97
98 name = p + 1;
99 for (p = name; isalnum(*p) || *p == '-' || *p == '_'; p++)
100 ;
101
102 len = p - name;
103 if (!len || *p != ')' || p[1])
104 goto err_free_param;
105
106 param->alg.attr.rta_len = sizeof(param->alg);
107 param->alg.attr.rta_type = CRYPTOA_ALG;
108 memcpy(param->alg.data.name, name, len);
109 param->alg.data.name[len] = 0;
110
111 memcpy(param->larval.name, larval->alg.cra_name, CRYPTO_MAX_ALG_NAME);
112 param->larval.type = larval->alg.cra_flags;
113 param->larval.mask = larval->mask;
114
115 INIT_WORK(&param->work, cryptomgr_probe, param);
116 schedule_work(&param->work);
117
118 return NOTIFY_STOP;
119
120err_free_param:
121 kfree(param);
122err:
123 return NOTIFY_OK;
124}
125
126static int cryptomgr_notify(struct notifier_block *this, unsigned long msg,
127 void *data)
128{
129 switch (msg) {
130 case CRYPTO_MSG_ALG_REQUEST:
131 return cryptomgr_schedule_probe(data);
132 }
133
134 return NOTIFY_DONE;
135}
136
137static struct notifier_block cryptomgr_notifier = {
138 .notifier_call = cryptomgr_notify,
139};
140
141static int __init cryptomgr_init(void)
142{
143 return crypto_register_notifier(&cryptomgr_notifier);
144}
145
146static void __exit cryptomgr_exit(void)
147{
148 int err = crypto_unregister_notifier(&cryptomgr_notifier);
149 BUG_ON(err);
150}
151
152module_init(cryptomgr_init);
153module_exit(cryptomgr_exit);
154
155MODULE_LICENSE("GPL");
156MODULE_DESCRIPTION("Crypto Algorithm Manager");
diff --git a/crypto/des.c b/crypto/des.c
index a9d3c235a6af..1df3a714fa47 100644
--- a/crypto/des.c
+++ b/crypto/des.c
@@ -784,9 +784,10 @@ static void dkey(u32 *pe, const u8 *k)
784} 784}
785 785
786static int des_setkey(struct crypto_tfm *tfm, const u8 *key, 786static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
787 unsigned int keylen, u32 *flags) 787 unsigned int keylen)
788{ 788{
789 struct des_ctx *dctx = crypto_tfm_ctx(tfm); 789 struct des_ctx *dctx = crypto_tfm_ctx(tfm);
790 u32 *flags = &tfm->crt_flags;
790 u32 tmp[DES_EXPKEY_WORDS]; 791 u32 tmp[DES_EXPKEY_WORDS];
791 int ret; 792 int ret;
792 793
@@ -864,11 +865,12 @@ static void des_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
864 * 865 *
865 */ 866 */
866static int des3_ede_setkey(struct crypto_tfm *tfm, const u8 *key, 867static int des3_ede_setkey(struct crypto_tfm *tfm, const u8 *key,
867 unsigned int keylen, u32 *flags) 868 unsigned int keylen)
868{ 869{
869 const u32 *K = (const u32 *)key; 870 const u32 *K = (const u32 *)key;
870 struct des3_ede_ctx *dctx = crypto_tfm_ctx(tfm); 871 struct des3_ede_ctx *dctx = crypto_tfm_ctx(tfm);
871 u32 *expkey = dctx->expkey; 872 u32 *expkey = dctx->expkey;
873 u32 *flags = &tfm->crt_flags;
872 874
873 if (unlikely(!((K[0] ^ K[2]) | (K[1] ^ K[3])) || 875 if (unlikely(!((K[0] ^ K[2]) | (K[1] ^ K[3])) ||
874 !((K[2] ^ K[4]) | (K[3] ^ K[5])))) 876 !((K[2] ^ K[4]) | (K[3] ^ K[5]))))
diff --git a/crypto/digest.c b/crypto/digest.c
index 603006a7bef2..0155a94e4b15 100644
--- a/crypto/digest.c
+++ b/crypto/digest.c
@@ -11,29 +11,89 @@
11 * any later version. 11 * any later version.
12 * 12 *
13 */ 13 */
14#include <linux/crypto.h> 14
15#include <linux/mm.h> 15#include <linux/mm.h>
16#include <linux/errno.h> 16#include <linux/errno.h>
17#include <linux/highmem.h> 17#include <linux/highmem.h>
18#include <asm/scatterlist.h> 18#include <linux/module.h>
19#include <linux/scatterlist.h>
20
19#include "internal.h" 21#include "internal.h"
22#include "scatterwalk.h"
20 23
21static void init(struct crypto_tfm *tfm) 24void crypto_digest_init(struct crypto_tfm *tfm)
22{ 25{
23 tfm->__crt_alg->cra_digest.dia_init(tfm); 26 struct crypto_hash *hash = crypto_hash_cast(tfm);
27 struct hash_desc desc = { .tfm = hash, .flags = tfm->crt_flags };
28
29 crypto_hash_init(&desc);
30}
31EXPORT_SYMBOL_GPL(crypto_digest_init);
32
33void crypto_digest_update(struct crypto_tfm *tfm,
34 struct scatterlist *sg, unsigned int nsg)
35{
36 struct crypto_hash *hash = crypto_hash_cast(tfm);
37 struct hash_desc desc = { .tfm = hash, .flags = tfm->crt_flags };
38 unsigned int nbytes = 0;
39 unsigned int i;
40
41 for (i = 0; i < nsg; i++)
42 nbytes += sg[i].length;
43
44 crypto_hash_update(&desc, sg, nbytes);
45}
46EXPORT_SYMBOL_GPL(crypto_digest_update);
47
48void crypto_digest_final(struct crypto_tfm *tfm, u8 *out)
49{
50 struct crypto_hash *hash = crypto_hash_cast(tfm);
51 struct hash_desc desc = { .tfm = hash, .flags = tfm->crt_flags };
52
53 crypto_hash_final(&desc, out);
24} 54}
55EXPORT_SYMBOL_GPL(crypto_digest_final);
25 56
26static void update(struct crypto_tfm *tfm, 57void crypto_digest_digest(struct crypto_tfm *tfm,
27 struct scatterlist *sg, unsigned int nsg) 58 struct scatterlist *sg, unsigned int nsg, u8 *out)
28{ 59{
60 struct crypto_hash *hash = crypto_hash_cast(tfm);
61 struct hash_desc desc = { .tfm = hash, .flags = tfm->crt_flags };
62 unsigned int nbytes = 0;
29 unsigned int i; 63 unsigned int i;
64
65 for (i = 0; i < nsg; i++)
66 nbytes += sg[i].length;
67
68 crypto_hash_digest(&desc, sg, nbytes, out);
69}
70EXPORT_SYMBOL_GPL(crypto_digest_digest);
71
72static int init(struct hash_desc *desc)
73{
74 struct crypto_tfm *tfm = crypto_hash_tfm(desc->tfm);
75
76 tfm->__crt_alg->cra_digest.dia_init(tfm);
77 return 0;
78}
79
80static int update(struct hash_desc *desc,
81 struct scatterlist *sg, unsigned int nbytes)
82{
83 struct crypto_tfm *tfm = crypto_hash_tfm(desc->tfm);
30 unsigned int alignmask = crypto_tfm_alg_alignmask(tfm); 84 unsigned int alignmask = crypto_tfm_alg_alignmask(tfm);
31 85
32 for (i = 0; i < nsg; i++) { 86 if (!nbytes)
87 return 0;
88
89 for (;;) {
90 struct page *pg = sg->page;
91 unsigned int offset = sg->offset;
92 unsigned int l = sg->length;
33 93
34 struct page *pg = sg[i].page; 94 if (unlikely(l > nbytes))
35 unsigned int offset = sg[i].offset; 95 l = nbytes;
36 unsigned int l = sg[i].length; 96 nbytes -= l;
37 97
38 do { 98 do {
39 unsigned int bytes_from_page = min(l, ((unsigned int) 99 unsigned int bytes_from_page = min(l, ((unsigned int)
@@ -55,41 +115,60 @@ static void update(struct crypto_tfm *tfm,
55 tfm->__crt_alg->cra_digest.dia_update(tfm, p, 115 tfm->__crt_alg->cra_digest.dia_update(tfm, p,
56 bytes_from_page); 116 bytes_from_page);
57 crypto_kunmap(src, 0); 117 crypto_kunmap(src, 0);
58 crypto_yield(tfm); 118 crypto_yield(desc->flags);
59 offset = 0; 119 offset = 0;
60 pg++; 120 pg++;
61 l -= bytes_from_page; 121 l -= bytes_from_page;
62 } while (l > 0); 122 } while (l > 0);
123
124 if (!nbytes)
125 break;
126 sg = sg_next(sg);
63 } 127 }
128
129 return 0;
64} 130}
65 131
66static void final(struct crypto_tfm *tfm, u8 *out) 132static int final(struct hash_desc *desc, u8 *out)
67{ 133{
134 struct crypto_tfm *tfm = crypto_hash_tfm(desc->tfm);
68 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); 135 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
136 struct digest_alg *digest = &tfm->__crt_alg->cra_digest;
137
69 if (unlikely((unsigned long)out & alignmask)) { 138 if (unlikely((unsigned long)out & alignmask)) {
70 unsigned int size = crypto_tfm_alg_digestsize(tfm); 139 unsigned long align = alignmask + 1;
71 u8 buffer[size + alignmask]; 140 unsigned long addr = (unsigned long)crypto_tfm_ctx(tfm);
72 u8 *dst = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); 141 u8 *dst = (u8 *)ALIGN(addr, align) +
73 tfm->__crt_alg->cra_digest.dia_final(tfm, dst); 142 ALIGN(tfm->__crt_alg->cra_ctxsize, align);
74 memcpy(out, dst, size); 143
144 digest->dia_final(tfm, dst);
145 memcpy(out, dst, digest->dia_digestsize);
75 } else 146 } else
76 tfm->__crt_alg->cra_digest.dia_final(tfm, out); 147 digest->dia_final(tfm, out);
148
149 return 0;
150}
151
152static int nosetkey(struct crypto_hash *tfm, const u8 *key, unsigned int keylen)
153{
154 crypto_hash_clear_flags(tfm, CRYPTO_TFM_RES_MASK);
155 return -ENOSYS;
77} 156}
78 157
79static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen) 158static int setkey(struct crypto_hash *hash, const u8 *key, unsigned int keylen)
80{ 159{
81 u32 flags; 160 struct crypto_tfm *tfm = crypto_hash_tfm(hash);
82 if (tfm->__crt_alg->cra_digest.dia_setkey == NULL) 161
83 return -ENOSYS; 162 crypto_hash_clear_flags(hash, CRYPTO_TFM_RES_MASK);
84 return tfm->__crt_alg->cra_digest.dia_setkey(tfm, key, keylen, &flags); 163 return tfm->__crt_alg->cra_digest.dia_setkey(tfm, key, keylen);
85} 164}
86 165
87static void digest(struct crypto_tfm *tfm, 166static int digest(struct hash_desc *desc,
88 struct scatterlist *sg, unsigned int nsg, u8 *out) 167 struct scatterlist *sg, unsigned int nbytes, u8 *out)
89{ 168{
90 init(tfm); 169 init(desc);
91 update(tfm, sg, nsg); 170 update(desc, sg, nbytes);
92 final(tfm, out); 171 return final(desc, out);
93} 172}
94 173
95int crypto_init_digest_flags(struct crypto_tfm *tfm, u32 flags) 174int crypto_init_digest_flags(struct crypto_tfm *tfm, u32 flags)
@@ -99,18 +178,22 @@ int crypto_init_digest_flags(struct crypto_tfm *tfm, u32 flags)
99 178
100int crypto_init_digest_ops(struct crypto_tfm *tfm) 179int crypto_init_digest_ops(struct crypto_tfm *tfm)
101{ 180{
102 struct digest_tfm *ops = &tfm->crt_digest; 181 struct hash_tfm *ops = &tfm->crt_hash;
182 struct digest_alg *dalg = &tfm->__crt_alg->cra_digest;
183
184 if (dalg->dia_digestsize > crypto_tfm_alg_blocksize(tfm))
185 return -EINVAL;
103 186
104 ops->dit_init = init; 187 ops->init = init;
105 ops->dit_update = update; 188 ops->update = update;
106 ops->dit_final = final; 189 ops->final = final;
107 ops->dit_digest = digest; 190 ops->digest = digest;
108 ops->dit_setkey = setkey; 191 ops->setkey = dalg->dia_setkey ? setkey : nosetkey;
192 ops->digestsize = dalg->dia_digestsize;
109 193
110 return crypto_alloc_hmac_block(tfm); 194 return 0;
111} 195}
112 196
113void crypto_exit_digest_ops(struct crypto_tfm *tfm) 197void crypto_exit_digest_ops(struct crypto_tfm *tfm)
114{ 198{
115 crypto_free_hmac_block(tfm);
116} 199}
diff --git a/crypto/ecb.c b/crypto/ecb.c
new file mode 100644
index 000000000000..f239aa9c4017
--- /dev/null
+++ b/crypto/ecb.c
@@ -0,0 +1,181 @@
1/*
2 * ECB: Electronic CodeBook mode
3 *
4 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License as published by the Free
8 * Software Foundation; either version 2 of the License, or (at your option)
9 * any later version.
10 *
11 */
12
13#include <crypto/algapi.h>
14#include <linux/err.h>
15#include <linux/init.h>
16#include <linux/kernel.h>
17#include <linux/module.h>
18#include <linux/scatterlist.h>
19#include <linux/slab.h>
20
21struct crypto_ecb_ctx {
22 struct crypto_cipher *child;
23};
24
25static int crypto_ecb_setkey(struct crypto_tfm *parent, const u8 *key,
26 unsigned int keylen)
27{
28 struct crypto_ecb_ctx *ctx = crypto_tfm_ctx(parent);
29 struct crypto_cipher *child = ctx->child;
30 int err;
31
32 crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
33 crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
34 CRYPTO_TFM_REQ_MASK);
35 err = crypto_cipher_setkey(child, key, keylen);
36 crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
37 CRYPTO_TFM_RES_MASK);
38 return err;
39}
40
41static int crypto_ecb_crypt(struct blkcipher_desc *desc,
42 struct blkcipher_walk *walk,
43 struct crypto_cipher *tfm,
44 void (*fn)(struct crypto_tfm *, u8 *, const u8 *))
45{
46 int bsize = crypto_cipher_blocksize(tfm);
47 unsigned int nbytes;
48 int err;
49
50 err = blkcipher_walk_virt(desc, walk);
51
52 while ((nbytes = walk->nbytes)) {
53 u8 *wsrc = walk->src.virt.addr;
54 u8 *wdst = walk->dst.virt.addr;
55
56 do {
57 fn(crypto_cipher_tfm(tfm), wdst, wsrc);
58
59 wsrc += bsize;
60 wdst += bsize;
61 } while ((nbytes -= bsize) >= bsize);
62
63 err = blkcipher_walk_done(desc, walk, nbytes);
64 }
65
66 return err;
67}
68
69static int crypto_ecb_encrypt(struct blkcipher_desc *desc,
70 struct scatterlist *dst, struct scatterlist *src,
71 unsigned int nbytes)
72{
73 struct blkcipher_walk walk;
74 struct crypto_blkcipher *tfm = desc->tfm;
75 struct crypto_ecb_ctx *ctx = crypto_blkcipher_ctx(tfm);
76 struct crypto_cipher *child = ctx->child;
77
78 blkcipher_walk_init(&walk, dst, src, nbytes);
79 return crypto_ecb_crypt(desc, &walk, child,
80 crypto_cipher_alg(child)->cia_encrypt);
81}
82
83static int crypto_ecb_decrypt(struct blkcipher_desc *desc,
84 struct scatterlist *dst, struct scatterlist *src,
85 unsigned int nbytes)
86{
87 struct blkcipher_walk walk;
88 struct crypto_blkcipher *tfm = desc->tfm;
89 struct crypto_ecb_ctx *ctx = crypto_blkcipher_ctx(tfm);
90 struct crypto_cipher *child = ctx->child;
91
92 blkcipher_walk_init(&walk, dst, src, nbytes);
93 return crypto_ecb_crypt(desc, &walk, child,
94 crypto_cipher_alg(child)->cia_decrypt);
95}
96
97static int crypto_ecb_init_tfm(struct crypto_tfm *tfm)
98{
99 struct crypto_instance *inst = (void *)tfm->__crt_alg;
100 struct crypto_spawn *spawn = crypto_instance_ctx(inst);
101 struct crypto_ecb_ctx *ctx = crypto_tfm_ctx(tfm);
102
103 tfm = crypto_spawn_tfm(spawn);
104 if (IS_ERR(tfm))
105 return PTR_ERR(tfm);
106
107 ctx->child = crypto_cipher_cast(tfm);
108 return 0;
109}
110
111static void crypto_ecb_exit_tfm(struct crypto_tfm *tfm)
112{
113 struct crypto_ecb_ctx *ctx = crypto_tfm_ctx(tfm);
114 crypto_free_cipher(ctx->child);
115}
116
117static struct crypto_instance *crypto_ecb_alloc(void *param, unsigned int len)
118{
119 struct crypto_instance *inst;
120 struct crypto_alg *alg;
121
122 alg = crypto_get_attr_alg(param, len, CRYPTO_ALG_TYPE_CIPHER,
123 CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
124 if (IS_ERR(alg))
125 return ERR_PTR(PTR_ERR(alg));
126
127 inst = crypto_alloc_instance("ecb", alg);
128 if (IS_ERR(inst))
129 goto out_put_alg;
130
131 inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
132 inst->alg.cra_priority = alg->cra_priority;
133 inst->alg.cra_blocksize = alg->cra_blocksize;
134 inst->alg.cra_alignmask = alg->cra_alignmask;
135 inst->alg.cra_type = &crypto_blkcipher_type;
136
137 inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
138 inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;
139
140 inst->alg.cra_ctxsize = sizeof(struct crypto_ecb_ctx);
141
142 inst->alg.cra_init = crypto_ecb_init_tfm;
143 inst->alg.cra_exit = crypto_ecb_exit_tfm;
144
145 inst->alg.cra_blkcipher.setkey = crypto_ecb_setkey;
146 inst->alg.cra_blkcipher.encrypt = crypto_ecb_encrypt;
147 inst->alg.cra_blkcipher.decrypt = crypto_ecb_decrypt;
148
149out_put_alg:
150 crypto_mod_put(alg);
151 return inst;
152}
153
154static void crypto_ecb_free(struct crypto_instance *inst)
155{
156 crypto_drop_spawn(crypto_instance_ctx(inst));
157 kfree(inst);
158}
159
160static struct crypto_template crypto_ecb_tmpl = {
161 .name = "ecb",
162 .alloc = crypto_ecb_alloc,
163 .free = crypto_ecb_free,
164 .module = THIS_MODULE,
165};
166
167static int __init crypto_ecb_module_init(void)
168{
169 return crypto_register_template(&crypto_ecb_tmpl);
170}
171
172static void __exit crypto_ecb_module_exit(void)
173{
174 crypto_unregister_template(&crypto_ecb_tmpl);
175}
176
177module_init(crypto_ecb_module_init);
178module_exit(crypto_ecb_module_exit);
179
180MODULE_LICENSE("GPL");
181MODULE_DESCRIPTION("ECB block cipher algorithm");
diff --git a/crypto/hash.c b/crypto/hash.c
new file mode 100644
index 000000000000..cdec23d885fe
--- /dev/null
+++ b/crypto/hash.c
@@ -0,0 +1,61 @@
1/*
2 * Cryptographic Hash operations.
3 *
4 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License as published by the Free
8 * Software Foundation; either version 2 of the License, or (at your option)
9 * any later version.
10 */
11
12#include <linux/errno.h>
13#include <linux/kernel.h>
14#include <linux/module.h>
15#include <linux/seq_file.h>
16
17#include "internal.h"
18
19static unsigned int crypto_hash_ctxsize(struct crypto_alg *alg)
20{
21 return alg->cra_ctxsize;
22}
23
24static int crypto_init_hash_ops(struct crypto_tfm *tfm)
25{
26 struct hash_tfm *crt = &tfm->crt_hash;
27 struct hash_alg *alg = &tfm->__crt_alg->cra_hash;
28
29 if (alg->digestsize > crypto_tfm_alg_blocksize(tfm))
30 return -EINVAL;
31
32 crt->init = alg->init;
33 crt->update = alg->update;
34 crt->final = alg->final;
35 crt->digest = alg->digest;
36 crt->setkey = alg->setkey;
37 crt->digestsize = alg->digestsize;
38
39 return 0;
40}
41
42static void crypto_hash_show(struct seq_file *m, struct crypto_alg *alg)
43 __attribute_used__;
44static void crypto_hash_show(struct seq_file *m, struct crypto_alg *alg)
45{
46 seq_printf(m, "type : hash\n");
47 seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
48 seq_printf(m, "digestsize : %u\n", alg->cra_hash.digestsize);
49}
50
51const struct crypto_type crypto_hash_type = {
52 .ctxsize = crypto_hash_ctxsize,
53 .init = crypto_init_hash_ops,
54#ifdef CONFIG_PROC_FS
55 .show = crypto_hash_show,
56#endif
57};
58EXPORT_SYMBOL_GPL(crypto_hash_type);
59
60MODULE_LICENSE("GPL");
61MODULE_DESCRIPTION("Generic cryptographic hash type");
diff --git a/crypto/hmac.c b/crypto/hmac.c
index 46120dee5ada..f403b6946047 100644
--- a/crypto/hmac.c
+++ b/crypto/hmac.c
@@ -4,121 +4,249 @@
4 * HMAC: Keyed-Hashing for Message Authentication (RFC2104). 4 * HMAC: Keyed-Hashing for Message Authentication (RFC2104).
5 * 5 *
6 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> 6 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
7 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
7 * 8 *
8 * The HMAC implementation is derived from USAGI. 9 * The HMAC implementation is derived from USAGI.
9 * Copyright (c) 2002 Kazunori Miyazawa <miyazawa@linux-ipv6.org> / USAGI 10 * Copyright (c) 2002 Kazunori Miyazawa <miyazawa@linux-ipv6.org> / USAGI
10 * 11 *
11 * This program is free software; you can redistribute it and/or modify it 12 * This program is free software; you can redistribute it and/or modify it
12 * under the terms of the GNU General Public License as published by the Free 13 * under the terms of the GNU General Public License as published by the Free
13 * Software Foundation; either version 2 of the License, or (at your option) 14 * Software Foundation; either version 2 of the License, or (at your option)
14 * any later version. 15 * any later version.
15 * 16 *
16 */ 17 */
17#include <linux/crypto.h> 18
18#include <linux/mm.h> 19#include <crypto/algapi.h>
19#include <linux/highmem.h> 20#include <linux/err.h>
20#include <linux/slab.h> 21#include <linux/init.h>
22#include <linux/kernel.h>
23#include <linux/module.h>
21#include <linux/scatterlist.h> 24#include <linux/scatterlist.h>
22#include "internal.h" 25#include <linux/slab.h>
26#include <linux/string.h>
23 27
24static void hash_key(struct crypto_tfm *tfm, u8 *key, unsigned int keylen) 28struct hmac_ctx {
29 struct crypto_hash *child;
30};
31
32static inline void *align_ptr(void *p, unsigned int align)
25{ 33{
26 struct scatterlist tmp; 34 return (void *)ALIGN((unsigned long)p, align);
27
28 sg_set_buf(&tmp, key, keylen);
29 crypto_digest_digest(tfm, &tmp, 1, key);
30} 35}
31 36
32int crypto_alloc_hmac_block(struct crypto_tfm *tfm) 37static inline struct hmac_ctx *hmac_ctx(struct crypto_hash *tfm)
33{ 38{
34 int ret = 0; 39 return align_ptr(crypto_hash_ctx_aligned(tfm) +
40 crypto_hash_blocksize(tfm) * 2 +
41 crypto_hash_digestsize(tfm), sizeof(void *));
42}
43
44static int hmac_setkey(struct crypto_hash *parent,
45 const u8 *inkey, unsigned int keylen)
46{
47 int bs = crypto_hash_blocksize(parent);
48 int ds = crypto_hash_digestsize(parent);
49 char *ipad = crypto_hash_ctx_aligned(parent);
50 char *opad = ipad + bs;
51 char *digest = opad + bs;
52 struct hmac_ctx *ctx = align_ptr(digest + ds, sizeof(void *));
53 struct crypto_hash *tfm = ctx->child;
54 unsigned int i;
55
56 if (keylen > bs) {
57 struct hash_desc desc;
58 struct scatterlist tmp;
59 int err;
60
61 desc.tfm = tfm;
62 desc.flags = crypto_hash_get_flags(parent);
63 desc.flags &= CRYPTO_TFM_REQ_MAY_SLEEP;
64 sg_set_buf(&tmp, inkey, keylen);
35 65
36 BUG_ON(!crypto_tfm_alg_blocksize(tfm)); 66 err = crypto_hash_digest(&desc, &tmp, keylen, digest);
37 67 if (err)
38 tfm->crt_digest.dit_hmac_block = kmalloc(crypto_tfm_alg_blocksize(tfm), 68 return err;
39 GFP_KERNEL);
40 if (tfm->crt_digest.dit_hmac_block == NULL)
41 ret = -ENOMEM;
42 69
43 return ret; 70 inkey = digest;
44 71 keylen = ds;
72 }
73
74 memcpy(ipad, inkey, keylen);
75 memset(ipad + keylen, 0, bs - keylen);
76 memcpy(opad, ipad, bs);
77
78 for (i = 0; i < bs; i++) {
79 ipad[i] ^= 0x36;
80 opad[i] ^= 0x5c;
81 }
82
83 return 0;
45} 84}
46 85
47void crypto_free_hmac_block(struct crypto_tfm *tfm) 86static int hmac_init(struct hash_desc *pdesc)
48{ 87{
49 kfree(tfm->crt_digest.dit_hmac_block); 88 struct crypto_hash *parent = pdesc->tfm;
89 int bs = crypto_hash_blocksize(parent);
90 int ds = crypto_hash_digestsize(parent);
91 char *ipad = crypto_hash_ctx_aligned(parent);
92 struct hmac_ctx *ctx = align_ptr(ipad + bs * 2 + ds, sizeof(void *));
93 struct hash_desc desc;
94 struct scatterlist tmp;
95
96 desc.tfm = ctx->child;
97 desc.flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
98 sg_set_buf(&tmp, ipad, bs);
99
100 return unlikely(crypto_hash_init(&desc)) ?:
101 crypto_hash_update(&desc, &tmp, 1);
50} 102}
51 103
52void crypto_hmac_init(struct crypto_tfm *tfm, u8 *key, unsigned int *keylen) 104static int hmac_update(struct hash_desc *pdesc,
105 struct scatterlist *sg, unsigned int nbytes)
53{ 106{
54 unsigned int i; 107 struct hmac_ctx *ctx = hmac_ctx(pdesc->tfm);
108 struct hash_desc desc;
109
110 desc.tfm = ctx->child;
111 desc.flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
112
113 return crypto_hash_update(&desc, sg, nbytes);
114}
115
116static int hmac_final(struct hash_desc *pdesc, u8 *out)
117{
118 struct crypto_hash *parent = pdesc->tfm;
119 int bs = crypto_hash_blocksize(parent);
120 int ds = crypto_hash_digestsize(parent);
121 char *opad = crypto_hash_ctx_aligned(parent) + bs;
122 char *digest = opad + bs;
123 struct hmac_ctx *ctx = align_ptr(digest + ds, sizeof(void *));
124 struct hash_desc desc;
55 struct scatterlist tmp; 125 struct scatterlist tmp;
56 char *ipad = tfm->crt_digest.dit_hmac_block;
57
58 if (*keylen > crypto_tfm_alg_blocksize(tfm)) {
59 hash_key(tfm, key, *keylen);
60 *keylen = crypto_tfm_alg_digestsize(tfm);
61 }
62 126
63 memset(ipad, 0, crypto_tfm_alg_blocksize(tfm)); 127 desc.tfm = ctx->child;
64 memcpy(ipad, key, *keylen); 128 desc.flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
129 sg_set_buf(&tmp, opad, bs + ds);
65 130
66 for (i = 0; i < crypto_tfm_alg_blocksize(tfm); i++) 131 return unlikely(crypto_hash_final(&desc, digest)) ?:
67 ipad[i] ^= 0x36; 132 crypto_hash_digest(&desc, &tmp, bs + ds, out);
133}
68 134
69 sg_set_buf(&tmp, ipad, crypto_tfm_alg_blocksize(tfm)); 135static int hmac_digest(struct hash_desc *pdesc, struct scatterlist *sg,
70 136 unsigned int nbytes, u8 *out)
71 crypto_digest_init(tfm); 137{
72 crypto_digest_update(tfm, &tmp, 1); 138 struct crypto_hash *parent = pdesc->tfm;
139 int bs = crypto_hash_blocksize(parent);
140 int ds = crypto_hash_digestsize(parent);
141 char *ipad = crypto_hash_ctx_aligned(parent);
142 char *opad = ipad + bs;
143 char *digest = opad + bs;
144 struct hmac_ctx *ctx = align_ptr(digest + ds, sizeof(void *));
145 struct hash_desc desc;
146 struct scatterlist sg1[2];
147 struct scatterlist sg2[1];
148
149 desc.tfm = ctx->child;
150 desc.flags = pdesc->flags & CRYPTO_TFM_REQ_MAY_SLEEP;
151
152 sg_set_buf(sg1, ipad, bs);
153 sg1[1].page = (void *)sg;
154 sg1[1].length = 0;
155 sg_set_buf(sg2, opad, bs + ds);
156
157 return unlikely(crypto_hash_digest(&desc, sg1, nbytes + bs, digest)) ?:
158 crypto_hash_digest(&desc, sg2, bs + ds, out);
73} 159}
74 160
75void crypto_hmac_update(struct crypto_tfm *tfm, 161static int hmac_init_tfm(struct crypto_tfm *tfm)
76 struct scatterlist *sg, unsigned int nsg)
77{ 162{
78 crypto_digest_update(tfm, sg, nsg); 163 struct crypto_instance *inst = (void *)tfm->__crt_alg;
164 struct crypto_spawn *spawn = crypto_instance_ctx(inst);
165 struct hmac_ctx *ctx = hmac_ctx(__crypto_hash_cast(tfm));
166
167 tfm = crypto_spawn_tfm(spawn);
168 if (IS_ERR(tfm))
169 return PTR_ERR(tfm);
170
171 ctx->child = crypto_hash_cast(tfm);
172 return 0;
79} 173}
80 174
81void crypto_hmac_final(struct crypto_tfm *tfm, u8 *key, 175static void hmac_exit_tfm(struct crypto_tfm *tfm)
82 unsigned int *keylen, u8 *out)
83{ 176{
84 unsigned int i; 177 struct hmac_ctx *ctx = hmac_ctx(__crypto_hash_cast(tfm));
85 struct scatterlist tmp; 178 crypto_free_hash(ctx->child);
86 char *opad = tfm->crt_digest.dit_hmac_block; 179}
87
88 if (*keylen > crypto_tfm_alg_blocksize(tfm)) {
89 hash_key(tfm, key, *keylen);
90 *keylen = crypto_tfm_alg_digestsize(tfm);
91 }
92 180
93 crypto_digest_final(tfm, out); 181static void hmac_free(struct crypto_instance *inst)
182{
183 crypto_drop_spawn(crypto_instance_ctx(inst));
184 kfree(inst);
185}
94 186
95 memset(opad, 0, crypto_tfm_alg_blocksize(tfm)); 187static struct crypto_instance *hmac_alloc(void *param, unsigned int len)
96 memcpy(opad, key, *keylen); 188{
97 189 struct crypto_instance *inst;
98 for (i = 0; i < crypto_tfm_alg_blocksize(tfm); i++) 190 struct crypto_alg *alg;
99 opad[i] ^= 0x5c; 191
192 alg = crypto_get_attr_alg(param, len, CRYPTO_ALG_TYPE_HASH,
193 CRYPTO_ALG_TYPE_HASH_MASK | CRYPTO_ALG_ASYNC);
194 if (IS_ERR(alg))
195 return ERR_PTR(PTR_ERR(alg));
196
197 inst = crypto_alloc_instance("hmac", alg);
198 if (IS_ERR(inst))
199 goto out_put_alg;
200
201 inst->alg.cra_flags = CRYPTO_ALG_TYPE_HASH;
202 inst->alg.cra_priority = alg->cra_priority;
203 inst->alg.cra_blocksize = alg->cra_blocksize;
204 inst->alg.cra_alignmask = alg->cra_alignmask;
205 inst->alg.cra_type = &crypto_hash_type;
206
207 inst->alg.cra_hash.digestsize =
208 (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
209 CRYPTO_ALG_TYPE_HASH ? alg->cra_hash.digestsize :
210 alg->cra_digest.dia_digestsize;
211
212 inst->alg.cra_ctxsize = sizeof(struct hmac_ctx) +
213 ALIGN(inst->alg.cra_blocksize * 2 +
214 inst->alg.cra_hash.digestsize,
215 sizeof(void *));
216
217 inst->alg.cra_init = hmac_init_tfm;
218 inst->alg.cra_exit = hmac_exit_tfm;
219
220 inst->alg.cra_hash.init = hmac_init;
221 inst->alg.cra_hash.update = hmac_update;
222 inst->alg.cra_hash.final = hmac_final;
223 inst->alg.cra_hash.digest = hmac_digest;
224 inst->alg.cra_hash.setkey = hmac_setkey;
225
226out_put_alg:
227 crypto_mod_put(alg);
228 return inst;
229}
100 230
101 sg_set_buf(&tmp, opad, crypto_tfm_alg_blocksize(tfm)); 231static struct crypto_template hmac_tmpl = {
232 .name = "hmac",
233 .alloc = hmac_alloc,
234 .free = hmac_free,
235 .module = THIS_MODULE,
236};
102 237
103 crypto_digest_init(tfm); 238static int __init hmac_module_init(void)
104 crypto_digest_update(tfm, &tmp, 1); 239{
105 240 return crypto_register_template(&hmac_tmpl);
106 sg_set_buf(&tmp, out, crypto_tfm_alg_digestsize(tfm));
107
108 crypto_digest_update(tfm, &tmp, 1);
109 crypto_digest_final(tfm, out);
110} 241}
111 242
112void crypto_hmac(struct crypto_tfm *tfm, u8 *key, unsigned int *keylen, 243static void __exit hmac_module_exit(void)
113 struct scatterlist *sg, unsigned int nsg, u8 *out)
114{ 244{
115 crypto_hmac_init(tfm, key, keylen); 245 crypto_unregister_template(&hmac_tmpl);
116 crypto_hmac_update(tfm, sg, nsg);
117 crypto_hmac_final(tfm, key, keylen, out);
118} 246}
119 247
120EXPORT_SYMBOL_GPL(crypto_hmac_init); 248module_init(hmac_module_init);
121EXPORT_SYMBOL_GPL(crypto_hmac_update); 249module_exit(hmac_module_exit);
122EXPORT_SYMBOL_GPL(crypto_hmac_final);
123EXPORT_SYMBOL_GPL(crypto_hmac);
124 250
251MODULE_LICENSE("GPL");
252MODULE_DESCRIPTION("HMAC hash algorithm");
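
The exported crypto_hmac_init/update/final helpers removed above are superseded by the "hmac" template, which wraps an underlying hash and is requested by name, e.g. "hmac(sha1)". A minimal sketch of a caller on the new crypto_hash interface, using only functions that appear elsewhere in this patch (crypto_alloc_hash, crypto_hash_setkey, crypto_hash_digest, crypto_free_hash); the function and buffer names are illustrative:

#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>

/* HMAC-SHA1 over one buffer via the hmac template; 'out' must hold
 * crypto_hash_digestsize() bytes (20 for SHA-1). */
static int example_hmac_sha1(const u8 *key, unsigned int keylen,
			     u8 *data, unsigned int len, u8 *out)
{
	struct crypto_hash *tfm;
	struct hash_desc desc;
	struct scatterlist sg;
	int err;

	tfm = crypto_alloc_hash("hmac(sha1)", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	desc.tfm = tfm;
	desc.flags = 0;
	sg_set_buf(&sg, data, len);

	err = crypto_hash_setkey(tfm, key, keylen);
	if (!err)
		err = crypto_hash_digest(&desc, &sg, len, out);

	crypto_free_hash(tfm);
	return err;
}
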
diff --git a/crypto/internal.h b/crypto/internal.h
index 959e602909a6..2da6ad4f3593 100644
--- a/crypto/internal.h
+++ b/crypto/internal.h
@@ -12,19 +12,43 @@
12 */ 12 */
13#ifndef _CRYPTO_INTERNAL_H 13#ifndef _CRYPTO_INTERNAL_H
14#define _CRYPTO_INTERNAL_H 14#define _CRYPTO_INTERNAL_H
15#include <linux/crypto.h> 15
16#include <crypto/algapi.h>
17#include <linux/completion.h>
16#include <linux/mm.h> 18#include <linux/mm.h>
17#include <linux/highmem.h> 19#include <linux/highmem.h>
18#include <linux/interrupt.h> 20#include <linux/interrupt.h>
19#include <linux/init.h> 21#include <linux/init.h>
20#include <linux/list.h> 22#include <linux/list.h>
23#include <linux/module.h>
21#include <linux/kernel.h> 24#include <linux/kernel.h>
25#include <linux/notifier.h>
22#include <linux/rwsem.h> 26#include <linux/rwsem.h>
23#include <linux/slab.h> 27#include <linux/slab.h>
24#include <asm/kmap_types.h> 28#include <asm/kmap_types.h>
25 29
30/* Crypto notification events. */
31enum {
32 CRYPTO_MSG_ALG_REQUEST,
33 CRYPTO_MSG_ALG_REGISTER,
34 CRYPTO_MSG_ALG_UNREGISTER,
35 CRYPTO_MSG_TMPL_REGISTER,
36 CRYPTO_MSG_TMPL_UNREGISTER,
37};
38
39struct crypto_instance;
40struct crypto_template;
41
42struct crypto_larval {
43 struct crypto_alg alg;
44 struct crypto_alg *adult;
45 struct completion completion;
46 u32 mask;
47};
48
26extern struct list_head crypto_alg_list; 49extern struct list_head crypto_alg_list;
27extern struct rw_semaphore crypto_alg_sem; 50extern struct rw_semaphore crypto_alg_sem;
51extern struct blocking_notifier_head crypto_chain;
28 52
29extern enum km_type crypto_km_types[]; 53extern enum km_type crypto_km_types[];
30 54
@@ -43,36 +67,33 @@ static inline void crypto_kunmap(void *vaddr, int out)
43 kunmap_atomic(vaddr, crypto_kmap_type(out)); 67 kunmap_atomic(vaddr, crypto_kmap_type(out));
44} 68}
45 69
46static inline void crypto_yield(struct crypto_tfm *tfm) 70static inline void crypto_yield(u32 flags)
47{ 71{
48 if (tfm->crt_flags & CRYPTO_TFM_REQ_MAY_SLEEP) 72 if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
49 cond_resched(); 73 cond_resched();
50} 74}
51 75
52#ifdef CONFIG_CRYPTO_HMAC
53int crypto_alloc_hmac_block(struct crypto_tfm *tfm);
54void crypto_free_hmac_block(struct crypto_tfm *tfm);
55#else
56static inline int crypto_alloc_hmac_block(struct crypto_tfm *tfm)
57{
58 return 0;
59}
60
61static inline void crypto_free_hmac_block(struct crypto_tfm *tfm)
62{ }
63#endif
64
65#ifdef CONFIG_PROC_FS 76#ifdef CONFIG_PROC_FS
66void __init crypto_init_proc(void); 77void __init crypto_init_proc(void);
78void __exit crypto_exit_proc(void);
67#else 79#else
68static inline void crypto_init_proc(void) 80static inline void crypto_init_proc(void)
69{ } 81{ }
82static inline void crypto_exit_proc(void)
83{ }
70#endif 84#endif
71 85
72static inline unsigned int crypto_digest_ctxsize(struct crypto_alg *alg, 86static inline unsigned int crypto_digest_ctxsize(struct crypto_alg *alg,
73 int flags) 87 int flags)
74{ 88{
75 return alg->cra_ctxsize; 89 unsigned int len = alg->cra_ctxsize;
90
91 if (alg->cra_alignmask) {
92 len = ALIGN(len, (unsigned long)alg->cra_alignmask + 1);
93 len += alg->cra_digest.dia_digestsize;
94 }
95
96 return len;
76} 97}
77 98
78static inline unsigned int crypto_cipher_ctxsize(struct crypto_alg *alg, 99static inline unsigned int crypto_cipher_ctxsize(struct crypto_alg *alg,
@@ -96,6 +117,10 @@ static inline unsigned int crypto_compress_ctxsize(struct crypto_alg *alg,
96 return alg->cra_ctxsize; 117 return alg->cra_ctxsize;
97} 118}
98 119
120struct crypto_alg *crypto_mod_get(struct crypto_alg *alg);
121struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, u32 mask);
122struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask);
123
99int crypto_init_digest_flags(struct crypto_tfm *tfm, u32 flags); 124int crypto_init_digest_flags(struct crypto_tfm *tfm, u32 flags);
100int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags); 125int crypto_init_cipher_flags(struct crypto_tfm *tfm, u32 flags);
101int crypto_init_compress_flags(struct crypto_tfm *tfm, u32 flags); 126int crypto_init_compress_flags(struct crypto_tfm *tfm, u32 flags);
@@ -108,5 +133,52 @@ void crypto_exit_digest_ops(struct crypto_tfm *tfm);
108void crypto_exit_cipher_ops(struct crypto_tfm *tfm); 133void crypto_exit_cipher_ops(struct crypto_tfm *tfm);
109void crypto_exit_compress_ops(struct crypto_tfm *tfm); 134void crypto_exit_compress_ops(struct crypto_tfm *tfm);
110 135
136void crypto_larval_error(const char *name, u32 type, u32 mask);
137
138void crypto_shoot_alg(struct crypto_alg *alg);
139struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 flags);
140
141int crypto_register_instance(struct crypto_template *tmpl,
142 struct crypto_instance *inst);
143
144int crypto_register_notifier(struct notifier_block *nb);
145int crypto_unregister_notifier(struct notifier_block *nb);
146
147static inline void crypto_alg_put(struct crypto_alg *alg)
148{
149 if (atomic_dec_and_test(&alg->cra_refcnt) && alg->cra_destroy)
150 alg->cra_destroy(alg);
151}
152
153static inline int crypto_tmpl_get(struct crypto_template *tmpl)
154{
155 return try_module_get(tmpl->module);
156}
157
158static inline void crypto_tmpl_put(struct crypto_template *tmpl)
159{
160 module_put(tmpl->module);
161}
162
163static inline int crypto_is_larval(struct crypto_alg *alg)
164{
165 return alg->cra_flags & CRYPTO_ALG_LARVAL;
166}
167
168static inline int crypto_is_dead(struct crypto_alg *alg)
169{
170 return alg->cra_flags & CRYPTO_ALG_DEAD;
171}
172
173static inline int crypto_is_moribund(struct crypto_alg *alg)
174{
175 return alg->cra_flags & (CRYPTO_ALG_DEAD | CRYPTO_ALG_DYING);
176}
177
178static inline int crypto_notify(unsigned long val, void *v)
179{
180 return blocking_notifier_call_chain(&crypto_chain, val, v);
181}
182
111#endif /* _CRYPTO_INTERNAL_H */ 183#endif /* _CRYPTO_INTERNAL_H */
112 184
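
The notifier chain and CRYPTO_MSG_* events added to internal.h let other parts of the core (such as the new cryptomgr) react to algorithm lookups and template registration. A rough sketch of how an in-tree consumer could subscribe, assuming only what this header declares (crypto_register_notifier/crypto_unregister_notifier with standard blocking-notifier semantics); the callback itself is hypothetical:

#include <linux/kernel.h>
#include <linux/notifier.h>

#include "internal.h"

/* Hypothetical listener for events posted via crypto_notify(). */
static int example_crypto_event(struct notifier_block *nb,
				unsigned long msg, void *data)
{
	switch (msg) {
	case CRYPTO_MSG_ALG_REQUEST:
	case CRYPTO_MSG_TMPL_REGISTER:
	case CRYPTO_MSG_TMPL_UNREGISTER:
		/* 'data' is event-specific; its layout is not shown here. */
		return NOTIFY_OK;
	default:
		return NOTIFY_DONE;
	}
}

static struct notifier_block example_crypto_nb = {
	.notifier_call = example_crypto_event,
};

/* Registered with crypto_register_notifier(&example_crypto_nb) and
 * removed again with crypto_unregister_notifier(). */
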
diff --git a/crypto/khazad.c b/crypto/khazad.c
index d4c9d3657b36..9fa24a2dd6ff 100644
--- a/crypto/khazad.c
+++ b/crypto/khazad.c
@@ -755,19 +755,13 @@ static const u64 c[KHAZAD_ROUNDS + 1] = {
755}; 755};
756 756
757static int khazad_setkey(struct crypto_tfm *tfm, const u8 *in_key, 757static int khazad_setkey(struct crypto_tfm *tfm, const u8 *in_key,
758 unsigned int key_len, u32 *flags) 758 unsigned int key_len)
759{ 759{
760 struct khazad_ctx *ctx = crypto_tfm_ctx(tfm); 760 struct khazad_ctx *ctx = crypto_tfm_ctx(tfm);
761 const __be32 *key = (const __be32 *)in_key; 761 const __be32 *key = (const __be32 *)in_key;
762 int r; 762 int r;
763 const u64 *S = T7; 763 const u64 *S = T7;
764 u64 K2, K1; 764 u64 K2, K1;
765
766 if (key_len != 16)
767 {
768 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
769 return -EINVAL;
770 }
771 765
772 /* key is supposed to be 32-bit aligned */ 766 /* key is supposed to be 32-bit aligned */
773 K2 = ((u64)be32_to_cpu(key[0]) << 32) | be32_to_cpu(key[1]); 767 K2 = ((u64)be32_to_cpu(key[0]) << 32) | be32_to_cpu(key[1]);
diff --git a/crypto/michael_mic.c b/crypto/michael_mic.c
index d061da21cfda..094397b48849 100644
--- a/crypto/michael_mic.c
+++ b/crypto/michael_mic.c
@@ -123,14 +123,13 @@ static void michael_final(struct crypto_tfm *tfm, u8 *out)
123 123
124 124
125static int michael_setkey(struct crypto_tfm *tfm, const u8 *key, 125static int michael_setkey(struct crypto_tfm *tfm, const u8 *key,
126 unsigned int keylen, u32 *flags) 126 unsigned int keylen)
127{ 127{
128 struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm); 128 struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm);
129 const __le32 *data = (const __le32 *)key; 129 const __le32 *data = (const __le32 *)key;
130 130
131 if (keylen != 8) { 131 if (keylen != 8) {
132 if (flags) 132 tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
133 *flags = CRYPTO_TFM_RES_BAD_KEY_LEN;
134 return -EINVAL; 133 return -EINVAL;
135 } 134 }
136 135
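
The setkey() prototype loses its u32 *flags argument throughout this series; ciphers such as khazad (above) and serpent (below) drop their explicit length checks altogether, presumably because the generic cipher layer now rejects out-of-range key lengths, while algorithms that still validate the key themselves report failures through tfm->crt_flags, as michael_mic now does. A minimal sketch of the new-style setkey for a hypothetical algorithm that requires a 16-byte key:

#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/string.h>

struct example_ctx {
	u8 key[16];
};

/* New-style setkey: no *flags parameter; the error detail goes into
 * tfm->crt_flags and the return value carries -EINVAL. */
static int example_setkey(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int keylen)
{
	struct example_ctx *ctx = crypto_tfm_ctx(tfm);

	if (keylen != sizeof(ctx->key)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	return 0;
}
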
diff --git a/crypto/proc.c b/crypto/proc.c
index c0a5dd7ce2cc..dabce0676f63 100644
--- a/crypto/proc.c
+++ b/crypto/proc.c
@@ -12,6 +12,8 @@
12 * any later version. 12 * any later version.
13 * 13 *
14 */ 14 */
15
16#include <asm/atomic.h>
15#include <linux/init.h> 17#include <linux/init.h>
16#include <linux/crypto.h> 18#include <linux/crypto.h>
17#include <linux/rwsem.h> 19#include <linux/rwsem.h>
@@ -54,6 +56,7 @@ static int c_show(struct seq_file *m, void *p)
54 seq_printf(m, "driver : %s\n", alg->cra_driver_name); 56 seq_printf(m, "driver : %s\n", alg->cra_driver_name);
55 seq_printf(m, "module : %s\n", module_name(alg->cra_module)); 57 seq_printf(m, "module : %s\n", module_name(alg->cra_module));
56 seq_printf(m, "priority : %d\n", alg->cra_priority); 58 seq_printf(m, "priority : %d\n", alg->cra_priority);
59 seq_printf(m, "refcnt : %d\n", atomic_read(&alg->cra_refcnt));
57 60
58 switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) { 61 switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
59 case CRYPTO_ALG_TYPE_CIPHER: 62 case CRYPTO_ALG_TYPE_CIPHER:
@@ -75,7 +78,10 @@ static int c_show(struct seq_file *m, void *p)
75 seq_printf(m, "type : compression\n"); 78 seq_printf(m, "type : compression\n");
76 break; 79 break;
77 default: 80 default:
78 seq_printf(m, "type : unknown\n"); 81 if (alg->cra_type && alg->cra_type->show)
82 alg->cra_type->show(m, alg);
83 else
84 seq_printf(m, "type : unknown\n");
79 break; 85 break;
80 } 86 }
81 87
@@ -110,3 +116,8 @@ void __init crypto_init_proc(void)
110 if (proc) 116 if (proc)
111 proc->proc_fops = &proc_crypto_ops; 117 proc->proc_fops = &proc_crypto_ops;
112} 118}
119
120void __exit crypto_exit_proc(void)
121{
122 remove_proc_entry("crypto", NULL);
123}
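
/proc/crypto entries now also print the reference count, and algorithm types the old switch does not recognise are delegated to alg->cra_type->show(). A sketch of such a show hook, assuming the crypto_type callback takes the (struct seq_file *, struct crypto_alg *) pair used in the call above; the fields printed are illustrative:

#include <linux/seq_file.h>
#include <linux/crypto.h>

/* Hypothetical show callback invoked from c_show() when the generic
 * type switch falls through to cra_type->show. */
static void example_type_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : hash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
}
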
diff --git a/crypto/scatterwalk.c b/crypto/scatterwalk.c
index 2953e2cc56f0..35172d3f043b 100644
--- a/crypto/scatterwalk.c
+++ b/crypto/scatterwalk.c
@@ -15,9 +15,11 @@
15 */ 15 */
16#include <linux/kernel.h> 16#include <linux/kernel.h>
17#include <linux/mm.h> 17#include <linux/mm.h>
18#include <linux/module.h>
18#include <linux/pagemap.h> 19#include <linux/pagemap.h>
19#include <linux/highmem.h> 20#include <linux/highmem.h>
20#include <asm/scatterlist.h> 21#include <linux/scatterlist.h>
22
21#include "internal.h" 23#include "internal.h"
22#include "scatterwalk.h" 24#include "scatterwalk.h"
23 25
@@ -27,88 +29,77 @@ enum km_type crypto_km_types[] = {
27 KM_SOFTIRQ0, 29 KM_SOFTIRQ0,
28 KM_SOFTIRQ1, 30 KM_SOFTIRQ1,
29}; 31};
32EXPORT_SYMBOL_GPL(crypto_km_types);
30 33
31static void memcpy_dir(void *buf, void *sgdata, size_t nbytes, int out) 34static inline void memcpy_dir(void *buf, void *sgdata, size_t nbytes, int out)
32{ 35{
33 if (out) 36 void *src = out ? buf : sgdata;
34 memcpy(sgdata, buf, nbytes); 37 void *dst = out ? sgdata : buf;
35 else 38
36 memcpy(buf, sgdata, nbytes); 39 memcpy(dst, src, nbytes);
37} 40}
38 41
39void scatterwalk_start(struct scatter_walk *walk, struct scatterlist *sg) 42void scatterwalk_start(struct scatter_walk *walk, struct scatterlist *sg)
40{ 43{
41 unsigned int rest_of_page;
42
43 walk->sg = sg; 44 walk->sg = sg;
44 45
45 walk->page = sg->page;
46 walk->len_this_segment = sg->length;
47
48 BUG_ON(!sg->length); 46 BUG_ON(!sg->length);
49 47
50 rest_of_page = PAGE_CACHE_SIZE - (sg->offset & (PAGE_CACHE_SIZE - 1));
51 walk->len_this_page = min(sg->length, rest_of_page);
52 walk->offset = sg->offset; 48 walk->offset = sg->offset;
53} 49}
50EXPORT_SYMBOL_GPL(scatterwalk_start);
54 51
55void scatterwalk_map(struct scatter_walk *walk, int out) 52void *scatterwalk_map(struct scatter_walk *walk, int out)
56{
57 walk->data = crypto_kmap(walk->page, out) + walk->offset;
58}
59
60static inline void scatterwalk_unmap(struct scatter_walk *walk, int out)
61{ 53{
62 /* walk->data may be pointing the first byte of the next page; 54 return crypto_kmap(scatterwalk_page(walk), out) +
63 however, we know we transfered at least one byte. So, 55 offset_in_page(walk->offset);
64 walk->data - 1 will be a virtual address in the mapped page. */
65 crypto_kunmap(walk->data - 1, out);
66} 56}
57EXPORT_SYMBOL_GPL(scatterwalk_map);
67 58
68static void scatterwalk_pagedone(struct scatter_walk *walk, int out, 59static void scatterwalk_pagedone(struct scatter_walk *walk, int out,
69 unsigned int more) 60 unsigned int more)
70{ 61{
71 if (out) 62 if (out)
72 flush_dcache_page(walk->page); 63 flush_dcache_page(scatterwalk_page(walk));
73 64
74 if (more) { 65 if (more) {
75 walk->len_this_segment -= walk->len_this_page; 66 walk->offset += PAGE_SIZE - 1;
76 67 walk->offset &= PAGE_MASK;
77 if (walk->len_this_segment) { 68 if (walk->offset >= walk->sg->offset + walk->sg->length)
78 walk->page++;
79 walk->len_this_page = min(walk->len_this_segment,
80 (unsigned)PAGE_CACHE_SIZE);
81 walk->offset = 0;
82 }
83 else
84 scatterwalk_start(walk, sg_next(walk->sg)); 69 scatterwalk_start(walk, sg_next(walk->sg));
85 } 70 }
86} 71}
87 72
88void scatterwalk_done(struct scatter_walk *walk, int out, int more) 73void scatterwalk_done(struct scatter_walk *walk, int out, int more)
89{ 74{
90 scatterwalk_unmap(walk, out); 75 if (!offset_in_page(walk->offset) || !more)
91 if (walk->len_this_page == 0 || !more)
92 scatterwalk_pagedone(walk, out, more); 76 scatterwalk_pagedone(walk, out, more);
93} 77}
78EXPORT_SYMBOL_GPL(scatterwalk_done);
94 79
95/* 80void scatterwalk_copychunks(void *buf, struct scatter_walk *walk,
96 * Do not call this unless the total length of all of the fragments 81 size_t nbytes, int out)
97 * has been verified as multiple of the block size.
98 */
99int scatterwalk_copychunks(void *buf, struct scatter_walk *walk,
100 size_t nbytes, int out)
101{ 82{
102 while (nbytes > walk->len_this_page) { 83 for (;;) {
103 memcpy_dir(buf, walk->data, walk->len_this_page, out); 84 unsigned int len_this_page = scatterwalk_pagelen(walk);
104 buf += walk->len_this_page; 85 u8 *vaddr;
105 nbytes -= walk->len_this_page; 86
87 if (len_this_page > nbytes)
88 len_this_page = nbytes;
89
90 vaddr = scatterwalk_map(walk, out);
91 memcpy_dir(buf, vaddr, len_this_page, out);
92 scatterwalk_unmap(vaddr, out);
93
94 if (nbytes == len_this_page)
95 break;
96
97 buf += len_this_page;
98 nbytes -= len_this_page;
106 99
107 scatterwalk_unmap(walk, out);
108 scatterwalk_pagedone(walk, out, 1); 100 scatterwalk_pagedone(walk, out, 1);
109 scatterwalk_map(walk, out);
110 } 101 }
111 102
112 memcpy_dir(buf, walk->data, nbytes, out); 103 scatterwalk_advance(walk, nbytes);
113 return nbytes;
114} 104}
105EXPORT_SYMBOL_GPL(scatterwalk_copychunks);
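
With the cached page/data/len fields gone, a scatterlist walk is now driven entirely from sg->offset, and scatterwalk_copychunks() handles mapping and page crossings itself. A small sketch of linearising the first nbytes of a scatterlist into a kernel buffer with the helpers exported above (scatterwalk_start, scatterwalk_copychunks, scatterwalk_done); the caller is assumed to guarantee the list really holds nbytes:

#include <linux/scatterlist.h>

#include "scatterwalk.h"

/* Copy nbytes out of 'sg' into 'buf'.  out == 0 means "read from the
 * scatterlist"; copychunks maps, copies and advances page by page. */
static void example_sg_to_buf(void *buf, struct scatterlist *sg,
			      unsigned int nbytes)
{
	struct scatter_walk walk;

	scatterwalk_start(&walk, sg);
	scatterwalk_copychunks(buf, &walk, nbytes, 0);
	scatterwalk_done(&walk, 0, 0);
}
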
diff --git a/crypto/scatterwalk.h b/crypto/scatterwalk.h
index e79925c474a3..f1592cc2d0f4 100644
--- a/crypto/scatterwalk.h
+++ b/crypto/scatterwalk.h
@@ -14,45 +14,42 @@
14 14
15#ifndef _CRYPTO_SCATTERWALK_H 15#ifndef _CRYPTO_SCATTERWALK_H
16#define _CRYPTO_SCATTERWALK_H 16#define _CRYPTO_SCATTERWALK_H
17
17#include <linux/mm.h> 18#include <linux/mm.h>
18#include <asm/scatterlist.h> 19#include <linux/scatterlist.h>
19 20
20struct scatter_walk { 21#include "internal.h"
21 struct scatterlist *sg;
22 struct page *page;
23 void *data;
24 unsigned int len_this_page;
25 unsigned int len_this_segment;
26 unsigned int offset;
27};
28 22
29/* Define sg_next is an inline routine now in case we want to change
30 scatterlist to a linked list later. */
31static inline struct scatterlist *sg_next(struct scatterlist *sg) 23static inline struct scatterlist *sg_next(struct scatterlist *sg)
32{ 24{
33 return sg + 1; 25 return (++sg)->length ? sg : (void *)sg->page;
34} 26}
35 27
36static inline int scatterwalk_samebuf(struct scatter_walk *walk_in, 28static inline unsigned long scatterwalk_samebuf(struct scatter_walk *walk_in,
37 struct scatter_walk *walk_out) 29 struct scatter_walk *walk_out)
38{ 30{
39 return walk_in->page == walk_out->page && 31 return !(((walk_in->sg->page - walk_out->sg->page) << PAGE_SHIFT) +
40 walk_in->offset == walk_out->offset; 32 (int)(walk_in->offset - walk_out->offset));
33}
34
35static inline unsigned int scatterwalk_pagelen(struct scatter_walk *walk)
36{
37 unsigned int len = walk->sg->offset + walk->sg->length - walk->offset;
38 unsigned int len_this_page = offset_in_page(~walk->offset) + 1;
39 return len_this_page > len ? len : len_this_page;
41} 40}
42 41
43static inline unsigned int scatterwalk_clamp(struct scatter_walk *walk, 42static inline unsigned int scatterwalk_clamp(struct scatter_walk *walk,
44 unsigned int nbytes) 43 unsigned int nbytes)
45{ 44{
46 return nbytes > walk->len_this_page ? walk->len_this_page : nbytes; 45 unsigned int len_this_page = scatterwalk_pagelen(walk);
46 return nbytes > len_this_page ? len_this_page : nbytes;
47} 47}
48 48
49static inline void scatterwalk_advance(struct scatter_walk *walk, 49static inline void scatterwalk_advance(struct scatter_walk *walk,
50 unsigned int nbytes) 50 unsigned int nbytes)
51{ 51{
52 walk->data += nbytes;
53 walk->offset += nbytes; 52 walk->offset += nbytes;
54 walk->len_this_page -= nbytes;
55 walk->len_this_segment -= nbytes;
56} 53}
57 54
58static inline unsigned int scatterwalk_aligned(struct scatter_walk *walk, 55static inline unsigned int scatterwalk_aligned(struct scatter_walk *walk,
@@ -61,9 +58,20 @@ static inline unsigned int scatterwalk_aligned(struct scatter_walk *walk,
61 return !(walk->offset & alignmask); 58 return !(walk->offset & alignmask);
62} 59}
63 60
61static inline struct page *scatterwalk_page(struct scatter_walk *walk)
62{
63 return walk->sg->page + (walk->offset >> PAGE_SHIFT);
64}
65
66static inline void scatterwalk_unmap(void *vaddr, int out)
67{
68 crypto_kunmap(vaddr, out);
69}
70
64void scatterwalk_start(struct scatter_walk *walk, struct scatterlist *sg); 71void scatterwalk_start(struct scatter_walk *walk, struct scatterlist *sg);
65int scatterwalk_copychunks(void *buf, struct scatter_walk *walk, size_t nbytes, int out); 72void scatterwalk_copychunks(void *buf, struct scatter_walk *walk,
66void scatterwalk_map(struct scatter_walk *walk, int out); 73 size_t nbytes, int out);
74void *scatterwalk_map(struct scatter_walk *walk, int out);
67void scatterwalk_done(struct scatter_walk *walk, int out, int more); 75void scatterwalk_done(struct scatter_walk *walk, int out, int more);
68 76
69#endif /* _CRYPTO_SCATTERWALK_H */ 77#endif /* _CRYPTO_SCATTERWALK_H */
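
scatterwalk_pagelen() returns whichever is smaller: the bytes left in the current sg entry or the bytes left before the next page boundary, where the second term relies on offset_in_page(~offset) + 1 being equal to PAGE_SIZE - offset_in_page(offset). A worked example (numbers purely illustrative, assuming PAGE_SIZE == 4096):

/* With walk->offset == 4000 and PAGE_SIZE == 4096:
 *
 *   offset_in_page(~4000) + 1       ==  (4095 - 4000) + 1  ==  96
 *   PAGE_SIZE - offset_in_page(4000) ==  4096 - 4000       ==  96
 *
 * i.e. 96 bytes remain before the page boundary.  If the sg entry ends
 * sooner, the first term (sg->offset + sg->length - walk->offset) wins
 * in the final comparison.
 */
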
diff --git a/crypto/serpent.c b/crypto/serpent.c
index de60cdddbf4a..465d091cd3ec 100644
--- a/crypto/serpent.c
+++ b/crypto/serpent.c
@@ -216,7 +216,7 @@ struct serpent_ctx {
216 216
217 217
218static int serpent_setkey(struct crypto_tfm *tfm, const u8 *key, 218static int serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
219 unsigned int keylen, u32 *flags) 219 unsigned int keylen)
220{ 220{
221 struct serpent_ctx *ctx = crypto_tfm_ctx(tfm); 221 struct serpent_ctx *ctx = crypto_tfm_ctx(tfm);
222 u32 *k = ctx->expkey; 222 u32 *k = ctx->expkey;
@@ -224,13 +224,6 @@ static int serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
224 u32 r0,r1,r2,r3,r4; 224 u32 r0,r1,r2,r3,r4;
225 int i; 225 int i;
226 226
227 if ((keylen < SERPENT_MIN_KEY_SIZE)
228 || (keylen > SERPENT_MAX_KEY_SIZE))
229 {
230 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
231 return -EINVAL;
232 }
233
234 /* Copy key, add padding */ 227 /* Copy key, add padding */
235 228
236 for (i = 0; i < keylen; ++i) 229 for (i = 0; i < keylen; ++i)
@@ -497,21 +490,15 @@ static struct crypto_alg serpent_alg = {
497}; 490};
498 491
499static int tnepres_setkey(struct crypto_tfm *tfm, const u8 *key, 492static int tnepres_setkey(struct crypto_tfm *tfm, const u8 *key,
500 unsigned int keylen, u32 *flags) 493 unsigned int keylen)
501{ 494{
502 u8 rev_key[SERPENT_MAX_KEY_SIZE]; 495 u8 rev_key[SERPENT_MAX_KEY_SIZE];
503 int i; 496 int i;
504 497
505 if ((keylen < SERPENT_MIN_KEY_SIZE)
506 || (keylen > SERPENT_MAX_KEY_SIZE)) {
507 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
508 return -EINVAL;
509 }
510
511 for (i = 0; i < keylen; ++i) 498 for (i = 0; i < keylen; ++i)
512 rev_key[keylen - i - 1] = key[i]; 499 rev_key[keylen - i - 1] = key[i];
513 500
514 return serpent_setkey(tfm, rev_key, keylen, flags); 501 return serpent_setkey(tfm, rev_key, keylen);
515} 502}
516 503
517static void tnepres_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) 504static void tnepres_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
diff --git a/crypto/sha1.c b/crypto/sha1.c
index 6c77b689f87e..1bba551e5b45 100644
--- a/crypto/sha1.c
+++ b/crypto/sha1.c
@@ -109,6 +109,7 @@ static void sha1_final(struct crypto_tfm *tfm, u8 *out)
109 109
110static struct crypto_alg alg = { 110static struct crypto_alg alg = {
111 .cra_name = "sha1", 111 .cra_name = "sha1",
112 .cra_driver_name= "sha1-generic",
112 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 113 .cra_flags = CRYPTO_ALG_TYPE_DIGEST,
113 .cra_blocksize = SHA1_HMAC_BLOCK_SIZE, 114 .cra_blocksize = SHA1_HMAC_BLOCK_SIZE,
114 .cra_ctxsize = sizeof(struct sha1_ctx), 115 .cra_ctxsize = sizeof(struct sha1_ctx),
@@ -137,3 +138,5 @@ module_exit(fini);
137 138
138MODULE_LICENSE("GPL"); 139MODULE_LICENSE("GPL");
139MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm"); 140MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm");
141
142MODULE_ALIAS("sha1-generic");
diff --git a/crypto/sha256.c b/crypto/sha256.c
index bc71d85a7d02..716195bb54f2 100644
--- a/crypto/sha256.c
+++ b/crypto/sha256.c
@@ -309,6 +309,7 @@ static void sha256_final(struct crypto_tfm *tfm, u8 *out)
309 309
310static struct crypto_alg alg = { 310static struct crypto_alg alg = {
311 .cra_name = "sha256", 311 .cra_name = "sha256",
312 .cra_driver_name= "sha256-generic",
312 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 313 .cra_flags = CRYPTO_ALG_TYPE_DIGEST,
313 .cra_blocksize = SHA256_HMAC_BLOCK_SIZE, 314 .cra_blocksize = SHA256_HMAC_BLOCK_SIZE,
314 .cra_ctxsize = sizeof(struct sha256_ctx), 315 .cra_ctxsize = sizeof(struct sha256_ctx),
@@ -337,3 +338,5 @@ module_exit(fini);
337 338
338MODULE_LICENSE("GPL"); 339MODULE_LICENSE("GPL");
339MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm"); 340MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm");
341
342MODULE_ALIAS("sha256-generic");
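
With cra_driver_name and the matching module aliases in place, the generic C implementations can be probed for, and requested, by driver name rather than by algorithm name, which matters once an accelerated driver for the same algorithm may also be loaded. A small sketch using calls that appear elsewhere in this patch; error handling is kept to a minimum:

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/crypto.h>

/* Check for, then pin, the generic SHA-1 implementation by driver name. */
static int example_use_generic_sha1(void)
{
	struct crypto_hash *tfm;

	if (!crypto_has_alg("sha1-generic", 0, CRYPTO_ALG_ASYNC))
		return -ENOENT;

	tfm = crypto_alloc_hash("sha1-generic", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	crypto_free_hash(tfm);
	return 0;
}
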
diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c
index e52f56c5bd5e..83307420d31c 100644
--- a/crypto/tcrypt.c
+++ b/crypto/tcrypt.c
@@ -17,6 +17,7 @@
17 * 17 *
18 */ 18 */
19 19
20#include <linux/err.h>
20#include <linux/init.h> 21#include <linux/init.h>
21#include <linux/module.h> 22#include <linux/module.h>
22#include <linux/mm.h> 23#include <linux/mm.h>
@@ -54,8 +55,6 @@
54*/ 55*/
55#define ENCRYPT 1 56#define ENCRYPT 1
56#define DECRYPT 0 57#define DECRYPT 0
57#define MODE_ECB 1
58#define MODE_CBC 0
59 58
60static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 }; 59static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
61 60
@@ -89,9 +88,11 @@ static void test_hash(char *algo, struct hash_testvec *template,
89 unsigned int i, j, k, temp; 88 unsigned int i, j, k, temp;
90 struct scatterlist sg[8]; 89 struct scatterlist sg[8];
91 char result[64]; 90 char result[64];
92 struct crypto_tfm *tfm; 91 struct crypto_hash *tfm;
92 struct hash_desc desc;
93 struct hash_testvec *hash_tv; 93 struct hash_testvec *hash_tv;
94 unsigned int tsize; 94 unsigned int tsize;
95 int ret;
95 96
96 printk("\ntesting %s\n", algo); 97 printk("\ntesting %s\n", algo);
97 98
@@ -105,30 +106,42 @@ static void test_hash(char *algo, struct hash_testvec *template,
105 106
106 memcpy(tvmem, template, tsize); 107 memcpy(tvmem, template, tsize);
107 hash_tv = (void *)tvmem; 108 hash_tv = (void *)tvmem;
108 tfm = crypto_alloc_tfm(algo, 0); 109
109 if (tfm == NULL) { 110 tfm = crypto_alloc_hash(algo, 0, CRYPTO_ALG_ASYNC);
110 printk("failed to load transform for %s\n", algo); 111 if (IS_ERR(tfm)) {
112 printk("failed to load transform for %s: %ld\n", algo,
113 PTR_ERR(tfm));
111 return; 114 return;
112 } 115 }
113 116
117 desc.tfm = tfm;
118 desc.flags = 0;
119
114 for (i = 0; i < tcount; i++) { 120 for (i = 0; i < tcount; i++) {
115 printk("test %u:\n", i + 1); 121 printk("test %u:\n", i + 1);
116 memset(result, 0, 64); 122 memset(result, 0, 64);
117 123
118 sg_set_buf(&sg[0], hash_tv[i].plaintext, hash_tv[i].psize); 124 sg_set_buf(&sg[0], hash_tv[i].plaintext, hash_tv[i].psize);
119 125
120 crypto_digest_init(tfm); 126 if (hash_tv[i].ksize) {
121 if (tfm->crt_u.digest.dit_setkey) { 127 ret = crypto_hash_setkey(tfm, hash_tv[i].key,
122 crypto_digest_setkey(tfm, hash_tv[i].key, 128 hash_tv[i].ksize);
123 hash_tv[i].ksize); 129 if (ret) {
130 printk("setkey() failed ret=%d\n", ret);
131 goto out;
132 }
133 }
134
135 ret = crypto_hash_digest(&desc, sg, hash_tv[i].psize, result);
136 if (ret) {
137 printk("digest () failed ret=%d\n", ret);
138 goto out;
124 } 139 }
125 crypto_digest_update(tfm, sg, 1);
126 crypto_digest_final(tfm, result);
127 140
128 hexdump(result, crypto_tfm_alg_digestsize(tfm)); 141 hexdump(result, crypto_hash_digestsize(tfm));
129 printk("%s\n", 142 printk("%s\n",
130 memcmp(result, hash_tv[i].digest, 143 memcmp(result, hash_tv[i].digest,
131 crypto_tfm_alg_digestsize(tfm)) ? 144 crypto_hash_digestsize(tfm)) ?
132 "fail" : "pass"); 145 "fail" : "pass");
133 } 146 }
134 147
@@ -154,127 +167,56 @@ static void test_hash(char *algo, struct hash_testvec *template,
154 hash_tv[i].tap[k]); 167 hash_tv[i].tap[k]);
155 } 168 }
156 169
157 crypto_digest_digest(tfm, sg, hash_tv[i].np, result); 170 if (hash_tv[i].ksize) {
158 171 ret = crypto_hash_setkey(tfm, hash_tv[i].key,
159 hexdump(result, crypto_tfm_alg_digestsize(tfm)); 172 hash_tv[i].ksize);
160 printk("%s\n",
161 memcmp(result, hash_tv[i].digest,
162 crypto_tfm_alg_digestsize(tfm)) ?
163 "fail" : "pass");
164 }
165 }
166
167 crypto_free_tfm(tfm);
168}
169
170
171#ifdef CONFIG_CRYPTO_HMAC
172
173static void test_hmac(char *algo, struct hmac_testvec *template,
174 unsigned int tcount)
175{
176 unsigned int i, j, k, temp;
177 struct scatterlist sg[8];
178 char result[64];
179 struct crypto_tfm *tfm;
180 struct hmac_testvec *hmac_tv;
181 unsigned int tsize, klen;
182
183 tfm = crypto_alloc_tfm(algo, 0);
184 if (tfm == NULL) {
185 printk("failed to load transform for %s\n", algo);
186 return;
187 }
188
189 printk("\ntesting hmac_%s\n", algo);
190
191 tsize = sizeof(struct hmac_testvec);
192 tsize *= tcount;
193 if (tsize > TVMEMSIZE) {
194 printk("template (%u) too big for tvmem (%u)\n", tsize,
195 TVMEMSIZE);
196 goto out;
197 }
198
199 memcpy(tvmem, template, tsize);
200 hmac_tv = (void *)tvmem;
201
202 for (i = 0; i < tcount; i++) {
203 printk("test %u:\n", i + 1);
204 memset(result, 0, sizeof (result));
205
206 klen = hmac_tv[i].ksize;
207 sg_set_buf(&sg[0], hmac_tv[i].plaintext, hmac_tv[i].psize);
208
209 crypto_hmac(tfm, hmac_tv[i].key, &klen, sg, 1, result);
210 173
211 hexdump(result, crypto_tfm_alg_digestsize(tfm)); 174 if (ret) {
212 printk("%s\n", 175 printk("setkey() failed ret=%d\n", ret);
213 memcmp(result, hmac_tv[i].digest, 176 goto out;
214 crypto_tfm_alg_digestsize(tfm)) ? "fail" : 177 }
215 "pass");
216 }
217
218 printk("\ntesting hmac_%s across pages\n", algo);
219
220 memset(xbuf, 0, XBUFSIZE);
221
222 j = 0;
223 for (i = 0; i < tcount; i++) {
224 if (hmac_tv[i].np) {
225 j++;
226 printk("test %u:\n",j);
227 memset(result, 0, 64);
228
229 temp = 0;
230 klen = hmac_tv[i].ksize;
231 for (k = 0; k < hmac_tv[i].np; k++) {
232 memcpy(&xbuf[IDX[k]],
233 hmac_tv[i].plaintext + temp,
234 hmac_tv[i].tap[k]);
235 temp += hmac_tv[i].tap[k];
236 sg_set_buf(&sg[k], &xbuf[IDX[k]],
237 hmac_tv[i].tap[k]);
238 } 178 }
239 179
240 crypto_hmac(tfm, hmac_tv[i].key, &klen, sg, 180 ret = crypto_hash_digest(&desc, sg, hash_tv[i].psize,
241 hmac_tv[i].np, result); 181 result);
242 hexdump(result, crypto_tfm_alg_digestsize(tfm)); 182 if (ret) {
183 printk("digest () failed ret=%d\n", ret);
184 goto out;
185 }
243 186
187 hexdump(result, crypto_hash_digestsize(tfm));
244 printk("%s\n", 188 printk("%s\n",
245 memcmp(result, hmac_tv[i].digest, 189 memcmp(result, hash_tv[i].digest,
246 crypto_tfm_alg_digestsize(tfm)) ? 190 crypto_hash_digestsize(tfm)) ?
247 "fail" : "pass"); 191 "fail" : "pass");
248 } 192 }
249 } 193 }
194
250out: 195out:
251 crypto_free_tfm(tfm); 196 crypto_free_hash(tfm);
252} 197}
253 198
254#endif /* CONFIG_CRYPTO_HMAC */ 199static void test_cipher(char *algo, int enc,
255
256static void test_cipher(char *algo, int mode, int enc,
257 struct cipher_testvec *template, unsigned int tcount) 200 struct cipher_testvec *template, unsigned int tcount)
258{ 201{
259 unsigned int ret, i, j, k, temp; 202 unsigned int ret, i, j, k, temp;
260 unsigned int tsize; 203 unsigned int tsize;
204 unsigned int iv_len;
205 unsigned int len;
261 char *q; 206 char *q;
262 struct crypto_tfm *tfm; 207 struct crypto_blkcipher *tfm;
263 char *key; 208 char *key;
264 struct cipher_testvec *cipher_tv; 209 struct cipher_testvec *cipher_tv;
210 struct blkcipher_desc desc;
265 struct scatterlist sg[8]; 211 struct scatterlist sg[8];
266 const char *e, *m; 212 const char *e;
267 213
268 if (enc == ENCRYPT) 214 if (enc == ENCRYPT)
269 e = "encryption"; 215 e = "encryption";
270 else 216 else
271 e = "decryption"; 217 e = "decryption";
272 if (mode == MODE_ECB)
273 m = "ECB";
274 else
275 m = "CBC";
276 218
277 printk("\ntesting %s %s %s\n", algo, m, e); 219 printk("\ntesting %s %s\n", algo, e);
278 220
279 tsize = sizeof (struct cipher_testvec); 221 tsize = sizeof (struct cipher_testvec);
280 tsize *= tcount; 222 tsize *= tcount;
@@ -288,15 +230,15 @@ static void test_cipher(char *algo, int mode, int enc,
288 memcpy(tvmem, template, tsize); 230 memcpy(tvmem, template, tsize);
289 cipher_tv = (void *)tvmem; 231 cipher_tv = (void *)tvmem;
290 232
291 if (mode) 233 tfm = crypto_alloc_blkcipher(algo, 0, CRYPTO_ALG_ASYNC);
292 tfm = crypto_alloc_tfm(algo, 0);
293 else
294 tfm = crypto_alloc_tfm(algo, CRYPTO_TFM_MODE_CBC);
295 234
296 if (tfm == NULL) { 235 if (IS_ERR(tfm)) {
297 printk("failed to load transform for %s %s\n", algo, m); 236 printk("failed to load transform for %s: %ld\n", algo,
237 PTR_ERR(tfm));
298 return; 238 return;
299 } 239 }
240 desc.tfm = tfm;
241 desc.flags = 0;
300 242
301 j = 0; 243 j = 0;
302 for (i = 0; i < tcount; i++) { 244 for (i = 0; i < tcount; i++) {
@@ -305,14 +247,17 @@ static void test_cipher(char *algo, int mode, int enc,
305 printk("test %u (%d bit key):\n", 247 printk("test %u (%d bit key):\n",
306 j, cipher_tv[i].klen * 8); 248 j, cipher_tv[i].klen * 8);
307 249
308 tfm->crt_flags = 0; 250 crypto_blkcipher_clear_flags(tfm, ~0);
309 if (cipher_tv[i].wk) 251 if (cipher_tv[i].wk)
310 tfm->crt_flags |= CRYPTO_TFM_REQ_WEAK_KEY; 252 crypto_blkcipher_set_flags(
253 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
311 key = cipher_tv[i].key; 254 key = cipher_tv[i].key;
312 255
313 ret = crypto_cipher_setkey(tfm, key, cipher_tv[i].klen); 256 ret = crypto_blkcipher_setkey(tfm, key,
257 cipher_tv[i].klen);
314 if (ret) { 258 if (ret) {
315 printk("setkey() failed flags=%x\n", tfm->crt_flags); 259 printk("setkey() failed flags=%x\n",
260 crypto_blkcipher_get_flags(tfm));
316 261
317 if (!cipher_tv[i].fail) 262 if (!cipher_tv[i].fail)
318 goto out; 263 goto out;
@@ -321,19 +266,19 @@ static void test_cipher(char *algo, int mode, int enc,
321 sg_set_buf(&sg[0], cipher_tv[i].input, 266 sg_set_buf(&sg[0], cipher_tv[i].input,
322 cipher_tv[i].ilen); 267 cipher_tv[i].ilen);
323 268
324 if (!mode) { 269 iv_len = crypto_blkcipher_ivsize(tfm);
325 crypto_cipher_set_iv(tfm, cipher_tv[i].iv, 270 if (iv_len)
326 crypto_tfm_alg_ivsize(tfm)); 271 crypto_blkcipher_set_iv(tfm, cipher_tv[i].iv,
327 } 272 iv_len);
328
329 if (enc)
330 ret = crypto_cipher_encrypt(tfm, sg, sg, cipher_tv[i].ilen);
331 else
332 ret = crypto_cipher_decrypt(tfm, sg, sg, cipher_tv[i].ilen);
333 273
274 len = cipher_tv[i].ilen;
275 ret = enc ?
276 crypto_blkcipher_encrypt(&desc, sg, sg, len) :
277 crypto_blkcipher_decrypt(&desc, sg, sg, len);
334 278
335 if (ret) { 279 if (ret) {
336 printk("%s () failed flags=%x\n", e, tfm->crt_flags); 280 printk("%s () failed flags=%x\n", e,
281 desc.flags);
337 goto out; 282 goto out;
338 } 283 }
339 284
@@ -346,7 +291,7 @@ static void test_cipher(char *algo, int mode, int enc,
346 } 291 }
347 } 292 }
348 293
349 printk("\ntesting %s %s %s across pages (chunking)\n", algo, m, e); 294 printk("\ntesting %s %s across pages (chunking)\n", algo, e);
350 memset(xbuf, 0, XBUFSIZE); 295 memset(xbuf, 0, XBUFSIZE);
351 296
352 j = 0; 297 j = 0;
@@ -356,14 +301,17 @@ static void test_cipher(char *algo, int mode, int enc,
356 printk("test %u (%d bit key):\n", 301 printk("test %u (%d bit key):\n",
357 j, cipher_tv[i].klen * 8); 302 j, cipher_tv[i].klen * 8);
358 303
359 tfm->crt_flags = 0; 304 crypto_blkcipher_clear_flags(tfm, ~0);
360 if (cipher_tv[i].wk) 305 if (cipher_tv[i].wk)
361 tfm->crt_flags |= CRYPTO_TFM_REQ_WEAK_KEY; 306 crypto_blkcipher_set_flags(
307 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
362 key = cipher_tv[i].key; 308 key = cipher_tv[i].key;
363 309
364 ret = crypto_cipher_setkey(tfm, key, cipher_tv[i].klen); 310 ret = crypto_blkcipher_setkey(tfm, key,
311 cipher_tv[i].klen);
365 if (ret) { 312 if (ret) {
366 printk("setkey() failed flags=%x\n", tfm->crt_flags); 313 printk("setkey() failed flags=%x\n",
314 crypto_blkcipher_get_flags(tfm));
367 315
368 if (!cipher_tv[i].fail) 316 if (!cipher_tv[i].fail)
369 goto out; 317 goto out;
@@ -379,18 +327,19 @@ static void test_cipher(char *algo, int mode, int enc,
379 cipher_tv[i].tap[k]); 327 cipher_tv[i].tap[k]);
380 } 328 }
381 329
382 if (!mode) { 330 iv_len = crypto_blkcipher_ivsize(tfm);
383 crypto_cipher_set_iv(tfm, cipher_tv[i].iv, 331 if (iv_len)
384 crypto_tfm_alg_ivsize(tfm)); 332 crypto_blkcipher_set_iv(tfm, cipher_tv[i].iv,
385 } 333 iv_len);
386 334
387 if (enc) 335 len = cipher_tv[i].ilen;
388 ret = crypto_cipher_encrypt(tfm, sg, sg, cipher_tv[i].ilen); 336 ret = enc ?
389 else 337 crypto_blkcipher_encrypt(&desc, sg, sg, len) :
390 ret = crypto_cipher_decrypt(tfm, sg, sg, cipher_tv[i].ilen); 338 crypto_blkcipher_decrypt(&desc, sg, sg, len);
391 339
392 if (ret) { 340 if (ret) {
393 printk("%s () failed flags=%x\n", e, tfm->crt_flags); 341 printk("%s () failed flags=%x\n", e,
342 desc.flags);
394 goto out; 343 goto out;
395 } 344 }
396 345
@@ -409,10 +358,10 @@ static void test_cipher(char *algo, int mode, int enc,
409 } 358 }
410 359
411out: 360out:
412 crypto_free_tfm(tfm); 361 crypto_free_blkcipher(tfm);
413} 362}
414 363
415static int test_cipher_jiffies(struct crypto_tfm *tfm, int enc, char *p, 364static int test_cipher_jiffies(struct blkcipher_desc *desc, int enc, char *p,
416 int blen, int sec) 365 int blen, int sec)
417{ 366{
418 struct scatterlist sg[1]; 367 struct scatterlist sg[1];
@@ -425,9 +374,9 @@ static int test_cipher_jiffies(struct crypto_tfm *tfm, int enc, char *p,
425 for (start = jiffies, end = start + sec * HZ, bcount = 0; 374 for (start = jiffies, end = start + sec * HZ, bcount = 0;
426 time_before(jiffies, end); bcount++) { 375 time_before(jiffies, end); bcount++) {
427 if (enc) 376 if (enc)
428 ret = crypto_cipher_encrypt(tfm, sg, sg, blen); 377 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
429 else 378 else
430 ret = crypto_cipher_decrypt(tfm, sg, sg, blen); 379 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
431 380
432 if (ret) 381 if (ret)
433 return ret; 382 return ret;
@@ -438,7 +387,7 @@ static int test_cipher_jiffies(struct crypto_tfm *tfm, int enc, char *p,
438 return 0; 387 return 0;
439} 388}
440 389
441static int test_cipher_cycles(struct crypto_tfm *tfm, int enc, char *p, 390static int test_cipher_cycles(struct blkcipher_desc *desc, int enc, char *p,
442 int blen) 391 int blen)
443{ 392{
444 struct scatterlist sg[1]; 393 struct scatterlist sg[1];
@@ -454,9 +403,9 @@ static int test_cipher_cycles(struct crypto_tfm *tfm, int enc, char *p,
454 /* Warm-up run. */ 403 /* Warm-up run. */
455 for (i = 0; i < 4; i++) { 404 for (i = 0; i < 4; i++) {
456 if (enc) 405 if (enc)
457 ret = crypto_cipher_encrypt(tfm, sg, sg, blen); 406 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
458 else 407 else
459 ret = crypto_cipher_decrypt(tfm, sg, sg, blen); 408 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
460 409
461 if (ret) 410 if (ret)
462 goto out; 411 goto out;
@@ -468,9 +417,9 @@ static int test_cipher_cycles(struct crypto_tfm *tfm, int enc, char *p,
468 417
469 start = get_cycles(); 418 start = get_cycles();
470 if (enc) 419 if (enc)
471 ret = crypto_cipher_encrypt(tfm, sg, sg, blen); 420 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen);
472 else 421 else
473 ret = crypto_cipher_decrypt(tfm, sg, sg, blen); 422 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen);
474 end = get_cycles(); 423 end = get_cycles();
475 424
476 if (ret) 425 if (ret)
@@ -490,35 +439,32 @@ out:
490 return ret; 439 return ret;
491} 440}
492 441
493static void test_cipher_speed(char *algo, int mode, int enc, unsigned int sec, 442static void test_cipher_speed(char *algo, int enc, unsigned int sec,
494 struct cipher_testvec *template, 443 struct cipher_testvec *template,
495 unsigned int tcount, struct cipher_speed *speed) 444 unsigned int tcount, struct cipher_speed *speed)
496{ 445{
497 unsigned int ret, i, j, iv_len; 446 unsigned int ret, i, j, iv_len;
498 unsigned char *key, *p, iv[128]; 447 unsigned char *key, *p, iv[128];
499 struct crypto_tfm *tfm; 448 struct crypto_blkcipher *tfm;
500 const char *e, *m; 449 struct blkcipher_desc desc;
450 const char *e;
501 451
502 if (enc == ENCRYPT) 452 if (enc == ENCRYPT)
503 e = "encryption"; 453 e = "encryption";
504 else 454 else
505 e = "decryption"; 455 e = "decryption";
506 if (mode == MODE_ECB)
507 m = "ECB";
508 else
509 m = "CBC";
510 456
511 printk("\ntesting speed of %s %s %s\n", algo, m, e); 457 printk("\ntesting speed of %s %s\n", algo, e);
512 458
513 if (mode) 459 tfm = crypto_alloc_blkcipher(algo, 0, CRYPTO_ALG_ASYNC);
514 tfm = crypto_alloc_tfm(algo, 0);
515 else
516 tfm = crypto_alloc_tfm(algo, CRYPTO_TFM_MODE_CBC);
517 460
518 if (tfm == NULL) { 461 if (IS_ERR(tfm)) {
519 printk("failed to load transform for %s %s\n", algo, m); 462 printk("failed to load transform for %s: %ld\n", algo,
463 PTR_ERR(tfm));
520 return; 464 return;
521 } 465 }
466 desc.tfm = tfm;
467 desc.flags = 0;
522 468
523 for (i = 0; speed[i].klen != 0; i++) { 469 for (i = 0; speed[i].klen != 0; i++) {
524 if ((speed[i].blen + speed[i].klen) > TVMEMSIZE) { 470 if ((speed[i].blen + speed[i].klen) > TVMEMSIZE) {
@@ -542,125 +488,231 @@ static void test_cipher_speed(char *algo, int mode, int enc, unsigned int sec,
542 } 488 }
543 p = (unsigned char *)tvmem + speed[i].klen; 489 p = (unsigned char *)tvmem + speed[i].klen;
544 490
545 ret = crypto_cipher_setkey(tfm, key, speed[i].klen); 491 ret = crypto_blkcipher_setkey(tfm, key, speed[i].klen);
546 if (ret) { 492 if (ret) {
547 printk("setkey() failed flags=%x\n", tfm->crt_flags); 493 printk("setkey() failed flags=%x\n",
494 crypto_blkcipher_get_flags(tfm));
548 goto out; 495 goto out;
549 } 496 }
550 497
551 if (!mode) { 498 iv_len = crypto_blkcipher_ivsize(tfm);
552 iv_len = crypto_tfm_alg_ivsize(tfm); 499 if (iv_len) {
553 memset(&iv, 0xff, iv_len); 500 memset(&iv, 0xff, iv_len);
554 crypto_cipher_set_iv(tfm, iv, iv_len); 501 crypto_blkcipher_set_iv(tfm, iv, iv_len);
555 } 502 }
556 503
557 if (sec) 504 if (sec)
558 ret = test_cipher_jiffies(tfm, enc, p, speed[i].blen, 505 ret = test_cipher_jiffies(&desc, enc, p, speed[i].blen,
559 sec); 506 sec);
560 else 507 else
561 ret = test_cipher_cycles(tfm, enc, p, speed[i].blen); 508 ret = test_cipher_cycles(&desc, enc, p, speed[i].blen);
562 509
563 if (ret) { 510 if (ret) {
564 printk("%s() failed flags=%x\n", e, tfm->crt_flags); 511 printk("%s() failed flags=%x\n", e, desc.flags);
565 break; 512 break;
566 } 513 }
567 } 514 }
568 515
569out: 516out:
570 crypto_free_tfm(tfm); 517 crypto_free_blkcipher(tfm);
571} 518}
572 519
573static void test_digest_jiffies(struct crypto_tfm *tfm, char *p, int blen, 520static int test_hash_jiffies_digest(struct hash_desc *desc, char *p, int blen,
574 int plen, char *out, int sec) 521 char *out, int sec)
522{
523 struct scatterlist sg[1];
524 unsigned long start, end;
525 int bcount;
526 int ret;
527
528 for (start = jiffies, end = start + sec * HZ, bcount = 0;
529 time_before(jiffies, end); bcount++) {
530 sg_set_buf(sg, p, blen);
531 ret = crypto_hash_digest(desc, sg, blen, out);
532 if (ret)
533 return ret;
534 }
535
536 printk("%6u opers/sec, %9lu bytes/sec\n",
537 bcount / sec, ((long)bcount * blen) / sec);
538
539 return 0;
540}
541
542static int test_hash_jiffies(struct hash_desc *desc, char *p, int blen,
543 int plen, char *out, int sec)
575{ 544{
576 struct scatterlist sg[1]; 545 struct scatterlist sg[1];
577 unsigned long start, end; 546 unsigned long start, end;
578 int bcount, pcount; 547 int bcount, pcount;
548 int ret;
549
550 if (plen == blen)
551 return test_hash_jiffies_digest(desc, p, blen, out, sec);
579 552
580 for (start = jiffies, end = start + sec * HZ, bcount = 0; 553 for (start = jiffies, end = start + sec * HZ, bcount = 0;
581 time_before(jiffies, end); bcount++) { 554 time_before(jiffies, end); bcount++) {
582 crypto_digest_init(tfm); 555 ret = crypto_hash_init(desc);
556 if (ret)
557 return ret;
583 for (pcount = 0; pcount < blen; pcount += plen) { 558 for (pcount = 0; pcount < blen; pcount += plen) {
584 sg_set_buf(sg, p + pcount, plen); 559 sg_set_buf(sg, p + pcount, plen);
585 crypto_digest_update(tfm, sg, 1); 560 ret = crypto_hash_update(desc, sg, plen);
561 if (ret)
562 return ret;
586 } 563 }
587 /* we assume there is enough space in 'out' for the result */ 564 /* we assume there is enough space in 'out' for the result */
588 crypto_digest_final(tfm, out); 565 ret = crypto_hash_final(desc, out);
566 if (ret)
567 return ret;
589 } 568 }
590 569
591 printk("%6u opers/sec, %9lu bytes/sec\n", 570 printk("%6u opers/sec, %9lu bytes/sec\n",
592 bcount / sec, ((long)bcount * blen) / sec); 571 bcount / sec, ((long)bcount * blen) / sec);
593 572
594 return; 573 return 0;
574}
575
576static int test_hash_cycles_digest(struct hash_desc *desc, char *p, int blen,
577 char *out)
578{
579 struct scatterlist sg[1];
580 unsigned long cycles = 0;
581 int i;
582 int ret;
583
584 local_bh_disable();
585 local_irq_disable();
586
587 /* Warm-up run. */
588 for (i = 0; i < 4; i++) {
589 sg_set_buf(sg, p, blen);
590 ret = crypto_hash_digest(desc, sg, blen, out);
591 if (ret)
592 goto out;
593 }
594
595 /* The real thing. */
596 for (i = 0; i < 8; i++) {
597 cycles_t start, end;
598
599 start = get_cycles();
600
601 sg_set_buf(sg, p, blen);
602 ret = crypto_hash_digest(desc, sg, blen, out);
603 if (ret)
604 goto out;
605
606 end = get_cycles();
607
608 cycles += end - start;
609 }
610
611out:
612 local_irq_enable();
613 local_bh_enable();
614
615 if (ret)
616 return ret;
617
618 printk("%6lu cycles/operation, %4lu cycles/byte\n",
619 cycles / 8, cycles / (8 * blen));
620
621 return 0;
595} 622}
596 623
597static void test_digest_cycles(struct crypto_tfm *tfm, char *p, int blen, 624static int test_hash_cycles(struct hash_desc *desc, char *p, int blen,
598 int plen, char *out) 625 int plen, char *out)
599{ 626{
600 struct scatterlist sg[1]; 627 struct scatterlist sg[1];
601 unsigned long cycles = 0; 628 unsigned long cycles = 0;
602 int i, pcount; 629 int i, pcount;
630 int ret;
631
632 if (plen == blen)
633 return test_hash_cycles_digest(desc, p, blen, out);
603 634
604 local_bh_disable(); 635 local_bh_disable();
605 local_irq_disable(); 636 local_irq_disable();
606 637
607 /* Warm-up run. */ 638 /* Warm-up run. */
608 for (i = 0; i < 4; i++) { 639 for (i = 0; i < 4; i++) {
609 crypto_digest_init(tfm); 640 ret = crypto_hash_init(desc);
641 if (ret)
642 goto out;
610 for (pcount = 0; pcount < blen; pcount += plen) { 643 for (pcount = 0; pcount < blen; pcount += plen) {
611 sg_set_buf(sg, p + pcount, plen); 644 sg_set_buf(sg, p + pcount, plen);
612 crypto_digest_update(tfm, sg, 1); 645 ret = crypto_hash_update(desc, sg, plen);
646 if (ret)
647 goto out;
613 } 648 }
614 crypto_digest_final(tfm, out); 649 crypto_hash_final(desc, out);
650 if (ret)
651 goto out;
615 } 652 }
616 653
617 /* The real thing. */ 654 /* The real thing. */
618 for (i = 0; i < 8; i++) { 655 for (i = 0; i < 8; i++) {
619 cycles_t start, end; 656 cycles_t start, end;
620 657
621 crypto_digest_init(tfm);
622
623 start = get_cycles(); 658 start = get_cycles();
624 659
660 ret = crypto_hash_init(desc);
661 if (ret)
662 goto out;
625 for (pcount = 0; pcount < blen; pcount += plen) { 663 for (pcount = 0; pcount < blen; pcount += plen) {
626 sg_set_buf(sg, p + pcount, plen); 664 sg_set_buf(sg, p + pcount, plen);
627 crypto_digest_update(tfm, sg, 1); 665 ret = crypto_hash_update(desc, sg, plen);
666 if (ret)
667 goto out;
628 } 668 }
629 crypto_digest_final(tfm, out); 669 ret = crypto_hash_final(desc, out);
670 if (ret)
671 goto out;
630 672
631 end = get_cycles(); 673 end = get_cycles();
632 674
633 cycles += end - start; 675 cycles += end - start;
634 } 676 }
635 677
678out:
636 local_irq_enable(); 679 local_irq_enable();
637 local_bh_enable(); 680 local_bh_enable();
638 681
682 if (ret)
683 return ret;
684
639 printk("%6lu cycles/operation, %4lu cycles/byte\n", 685 printk("%6lu cycles/operation, %4lu cycles/byte\n",
640 cycles / 8, cycles / (8 * blen)); 686 cycles / 8, cycles / (8 * blen));
641 687
642 return; 688 return 0;
643} 689}
644 690
645static void test_digest_speed(char *algo, unsigned int sec, 691static void test_hash_speed(char *algo, unsigned int sec,
646 struct digest_speed *speed) 692 struct hash_speed *speed)
647{ 693{
648 struct crypto_tfm *tfm; 694 struct crypto_hash *tfm;
695 struct hash_desc desc;
649 char output[1024]; 696 char output[1024];
650 int i; 697 int i;
698 int ret;
651 699
652 printk("\ntesting speed of %s\n", algo); 700 printk("\ntesting speed of %s\n", algo);
653 701
654 tfm = crypto_alloc_tfm(algo, 0); 702 tfm = crypto_alloc_hash(algo, 0, CRYPTO_ALG_ASYNC);
655 703
656 if (tfm == NULL) { 704 if (IS_ERR(tfm)) {
657 printk("failed to load transform for %s\n", algo); 705 printk("failed to load transform for %s: %ld\n", algo,
706 PTR_ERR(tfm));
658 return; 707 return;
659 } 708 }
660 709
661 if (crypto_tfm_alg_digestsize(tfm) > sizeof(output)) { 710 desc.tfm = tfm;
711 desc.flags = 0;
712
713 if (crypto_hash_digestsize(tfm) > sizeof(output)) {
662 printk("digestsize(%u) > outputbuffer(%zu)\n", 714 printk("digestsize(%u) > outputbuffer(%zu)\n",
663 crypto_tfm_alg_digestsize(tfm), sizeof(output)); 715 crypto_hash_digestsize(tfm), sizeof(output));
664 goto out; 716 goto out;
665 } 717 }
666 718
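
The split above between the one-shot *_digest() helpers and the init/update/final path mirrors the two ways the new hash interface can be driven. A minimal sketch of the incremental form over two buffers, reusing a prepared hash_desc as the speed tests do; names are illustrative:

#include <linux/crypto.h>
#include <linux/scatterlist.h>

/* Hash two separate buffers incrementally; 'out' must hold at least
 * crypto_hash_digestsize(desc->tfm) bytes. */
static int example_hash_two_bufs(struct hash_desc *desc,
				 void *buf1, unsigned int len1,
				 void *buf2, unsigned int len2, u8 *out)
{
	struct scatterlist sg;
	int err;

	err = crypto_hash_init(desc);
	if (err)
		return err;

	sg_set_buf(&sg, buf1, len1);
	err = crypto_hash_update(desc, &sg, len1);
	if (err)
		return err;

	sg_set_buf(&sg, buf2, len2);
	err = crypto_hash_update(desc, &sg, len2);
	if (err)
		return err;

	return crypto_hash_final(desc, out);
}
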
@@ -677,20 +729,27 @@ static void test_digest_speed(char *algo, unsigned int sec,
677 memset(tvmem, 0xff, speed[i].blen); 729 memset(tvmem, 0xff, speed[i].blen);
678 730
679 if (sec) 731 if (sec)
680 test_digest_jiffies(tfm, tvmem, speed[i].blen, speed[i].plen, output, sec); 732 ret = test_hash_jiffies(&desc, tvmem, speed[i].blen,
733 speed[i].plen, output, sec);
681 else 734 else
682 test_digest_cycles(tfm, tvmem, speed[i].blen, speed[i].plen, output); 735 ret = test_hash_cycles(&desc, tvmem, speed[i].blen,
736 speed[i].plen, output);
737
738 if (ret) {
739 printk("hashing failed ret=%d\n", ret);
740 break;
741 }
683 } 742 }
684 743
685out: 744out:
686 crypto_free_tfm(tfm); 745 crypto_free_hash(tfm);
687} 746}
688 747
689static void test_deflate(void) 748static void test_deflate(void)
690{ 749{
691 unsigned int i; 750 unsigned int i;
692 char result[COMP_BUF_SIZE]; 751 char result[COMP_BUF_SIZE];
693 struct crypto_tfm *tfm; 752 struct crypto_comp *tfm;
694 struct comp_testvec *tv; 753 struct comp_testvec *tv;
695 unsigned int tsize; 754 unsigned int tsize;
696 755
@@ -762,105 +821,7 @@ static void test_deflate(void)
762 ilen, dlen); 821 ilen, dlen);
763 } 822 }
764out: 823out:
765 crypto_free_tfm(tfm); 824 crypto_free_comp(tfm);
766}
767
768static void test_crc32c(void)
769{
770#define NUMVEC 6
771#define VECSIZE 40
772
773 int i, j, pass;
774 u32 crc;
775 u8 b, test_vec[NUMVEC][VECSIZE];
776 static u32 vec_results[NUMVEC] = {
777 0x0e2c157f, 0xe980ebf6, 0xde74bded,
778 0xd579c862, 0xba979ad0, 0x2b29d913
779 };
780 static u32 tot_vec_results = 0x24c5d375;
781
782 struct scatterlist sg[NUMVEC];
783 struct crypto_tfm *tfm;
784 char *fmtdata = "testing crc32c initialized to %08x: %s\n";
785#define SEEDTESTVAL 0xedcba987
786 u32 seed;
787
788 printk("\ntesting crc32c\n");
789
790 tfm = crypto_alloc_tfm("crc32c", 0);
791 if (tfm == NULL) {
792 printk("failed to load transform for crc32c\n");
793 return;
794 }
795
796 crypto_digest_init(tfm);
797 crypto_digest_final(tfm, (u8*)&crc);
798 printk(fmtdata, crc, (crc == 0) ? "pass" : "ERROR");
799
800 /*
801 * stuff test_vec with known values, simple incrementing
802 * byte values.
803 */
804 b = 0;
805 for (i = 0; i < NUMVEC; i++) {
806 for (j = 0; j < VECSIZE; j++)
807 test_vec[i][j] = ++b;
808 sg_set_buf(&sg[i], test_vec[i], VECSIZE);
809 }
810
811 seed = SEEDTESTVAL;
812 (void)crypto_digest_setkey(tfm, (const u8*)&seed, sizeof(u32));
813 crypto_digest_final(tfm, (u8*)&crc);
814 printk("testing crc32c setkey returns %08x : %s\n", crc, (crc == (SEEDTESTVAL ^ ~(u32)0)) ?
815 "pass" : "ERROR");
816
817 printk("testing crc32c using update/final:\n");
818
819 pass = 1; /* assume all is well */
820
821 for (i = 0; i < NUMVEC; i++) {
822 seed = ~(u32)0;
823 (void)crypto_digest_setkey(tfm, (const u8*)&seed, sizeof(u32));
824 crypto_digest_update(tfm, &sg[i], 1);
825 crypto_digest_final(tfm, (u8*)&crc);
826 if (crc == vec_results[i]) {
827 printk(" %08x:OK", crc);
828 } else {
829 printk(" %08x:BAD, wanted %08x\n", crc, vec_results[i]);
830 pass = 0;
831 }
832 }
833
834 printk("\ntesting crc32c using incremental accumulator:\n");
835 crc = 0;
836 for (i = 0; i < NUMVEC; i++) {
837 seed = (crc ^ ~(u32)0);
838 (void)crypto_digest_setkey(tfm, (const u8*)&seed, sizeof(u32));
839 crypto_digest_update(tfm, &sg[i], 1);
840 crypto_digest_final(tfm, (u8*)&crc);
841 }
842 if (crc == tot_vec_results) {
843 printk(" %08x:OK", crc);
844 } else {
845 printk(" %08x:BAD, wanted %08x\n", crc, tot_vec_results);
846 pass = 0;
847 }
848
849 printk("\ntesting crc32c using digest:\n");
850 seed = ~(u32)0;
851 (void)crypto_digest_setkey(tfm, (const u8*)&seed, sizeof(u32));
852 crypto_digest_digest(tfm, sg, NUMVEC, (u8*)&crc);
853 if (crc == tot_vec_results) {
854 printk(" %08x:OK", crc);
855 } else {
856 printk(" %08x:BAD, wanted %08x\n", crc, tot_vec_results);
857 pass = 0;
858 }
859
860 printk("\n%s\n", pass ? "pass" : "ERROR");
861
862 crypto_free_tfm(tfm);
863 printk("crc32c test complete\n");
864} 825}
865 826
866static void test_available(void) 827static void test_available(void)
@@ -869,8 +830,8 @@ static void test_available(void)
869 830
870 while (*name) { 831 while (*name) {
871 printk("alg %s ", *name); 832 printk("alg %s ", *name);
872 printk((crypto_alg_available(*name, 0)) ? 833 printk(crypto_has_alg(*name, 0, CRYPTO_ALG_ASYNC) ?
873 "found\n" : "not found\n"); 834 "found\n" : "not found\n");
874 name++; 835 name++;
875 } 836 }
876} 837}
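
With MODE_ECB/MODE_CBC gone, the chaining mode is now encoded in the algorithm name handed to the allocator, as the renamed vectors in do_test() below show ("ecb(aes)", "cbc(aes)", ...). A minimal sketch of one in-place CBC-AES encryption on the block-cipher interface, using only calls visible in this patch; key, IV and length handling are illustrative and 'len' must be a multiple of the block size:

#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>

static int example_cbc_aes_encrypt(u8 *key, unsigned int keylen,
				   u8 *iv, u8 *buf, unsigned int len)
{
	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	struct scatterlist sg;
	int err;

	tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	desc.tfm = tfm;
	desc.flags = 0;

	err = crypto_blkcipher_setkey(tfm, key, keylen);
	if (err)
		goto out;

	crypto_blkcipher_set_iv(tfm, iv, crypto_blkcipher_ivsize(tfm));

	sg_set_buf(&sg, buf, len);
	err = crypto_blkcipher_encrypt(&desc, &sg, &sg, len);	/* in place */

out:
	crypto_free_blkcipher(tfm);
	return err;
}
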
@@ -885,79 +846,119 @@ static void do_test(void)
885 test_hash("sha1", sha1_tv_template, SHA1_TEST_VECTORS); 846 test_hash("sha1", sha1_tv_template, SHA1_TEST_VECTORS);
886 847
887 //DES 848 //DES
888 test_cipher ("des", MODE_ECB, ENCRYPT, des_enc_tv_template, DES_ENC_TEST_VECTORS); 849 test_cipher("ecb(des)", ENCRYPT, des_enc_tv_template,
889 test_cipher ("des", MODE_ECB, DECRYPT, des_dec_tv_template, DES_DEC_TEST_VECTORS); 850 DES_ENC_TEST_VECTORS);
890 test_cipher ("des", MODE_CBC, ENCRYPT, des_cbc_enc_tv_template, DES_CBC_ENC_TEST_VECTORS); 851 test_cipher("ecb(des)", DECRYPT, des_dec_tv_template,
891 test_cipher ("des", MODE_CBC, DECRYPT, des_cbc_dec_tv_template, DES_CBC_DEC_TEST_VECTORS); 852 DES_DEC_TEST_VECTORS);
853 test_cipher("cbc(des)", ENCRYPT, des_cbc_enc_tv_template,
854 DES_CBC_ENC_TEST_VECTORS);
855 test_cipher("cbc(des)", DECRYPT, des_cbc_dec_tv_template,
856 DES_CBC_DEC_TEST_VECTORS);
892 857
893 //DES3_EDE 858 //DES3_EDE
894 test_cipher ("des3_ede", MODE_ECB, ENCRYPT, des3_ede_enc_tv_template, DES3_EDE_ENC_TEST_VECTORS); 859 test_cipher("ecb(des3_ede)", ENCRYPT, des3_ede_enc_tv_template,
895 test_cipher ("des3_ede", MODE_ECB, DECRYPT, des3_ede_dec_tv_template, DES3_EDE_DEC_TEST_VECTORS); 860 DES3_EDE_ENC_TEST_VECTORS);
861 test_cipher("ecb(des3_ede)", DECRYPT, des3_ede_dec_tv_template,
862 DES3_EDE_DEC_TEST_VECTORS);
896 863
897 test_hash("md4", md4_tv_template, MD4_TEST_VECTORS); 864 test_hash("md4", md4_tv_template, MD4_TEST_VECTORS);
898 865
899 test_hash("sha256", sha256_tv_template, SHA256_TEST_VECTORS); 866 test_hash("sha256", sha256_tv_template, SHA256_TEST_VECTORS);
900 867
901 //BLOWFISH 868 //BLOWFISH
902 test_cipher ("blowfish", MODE_ECB, ENCRYPT, bf_enc_tv_template, BF_ENC_TEST_VECTORS); 869 test_cipher("ecb(blowfish)", ENCRYPT, bf_enc_tv_template,
903 test_cipher ("blowfish", MODE_ECB, DECRYPT, bf_dec_tv_template, BF_DEC_TEST_VECTORS); 870 BF_ENC_TEST_VECTORS);
904 test_cipher ("blowfish", MODE_CBC, ENCRYPT, bf_cbc_enc_tv_template, BF_CBC_ENC_TEST_VECTORS); 871 test_cipher("ecb(blowfish)", DECRYPT, bf_dec_tv_template,
905 test_cipher ("blowfish", MODE_CBC, DECRYPT, bf_cbc_dec_tv_template, BF_CBC_DEC_TEST_VECTORS); 872 BF_DEC_TEST_VECTORS);
873 test_cipher("cbc(blowfish)", ENCRYPT, bf_cbc_enc_tv_template,
874 BF_CBC_ENC_TEST_VECTORS);
875 test_cipher("cbc(blowfish)", DECRYPT, bf_cbc_dec_tv_template,
876 BF_CBC_DEC_TEST_VECTORS);
906 877
907 //TWOFISH 878 //TWOFISH
908 test_cipher ("twofish", MODE_ECB, ENCRYPT, tf_enc_tv_template, TF_ENC_TEST_VECTORS); 879 test_cipher("ecb(twofish)", ENCRYPT, tf_enc_tv_template,
909 test_cipher ("twofish", MODE_ECB, DECRYPT, tf_dec_tv_template, TF_DEC_TEST_VECTORS); 880 TF_ENC_TEST_VECTORS);
910 test_cipher ("twofish", MODE_CBC, ENCRYPT, tf_cbc_enc_tv_template, TF_CBC_ENC_TEST_VECTORS); 881 test_cipher("ecb(twofish)", DECRYPT, tf_dec_tv_template,
911 test_cipher ("twofish", MODE_CBC, DECRYPT, tf_cbc_dec_tv_template, TF_CBC_DEC_TEST_VECTORS); 882 TF_DEC_TEST_VECTORS);
883 test_cipher("cbc(twofish)", ENCRYPT, tf_cbc_enc_tv_template,
884 TF_CBC_ENC_TEST_VECTORS);
885 test_cipher("cbc(twofish)", DECRYPT, tf_cbc_dec_tv_template,
886 TF_CBC_DEC_TEST_VECTORS);
912 887
913 //SERPENT 888 //SERPENT
914 test_cipher ("serpent", MODE_ECB, ENCRYPT, serpent_enc_tv_template, SERPENT_ENC_TEST_VECTORS); 889 test_cipher("ecb(serpent)", ENCRYPT, serpent_enc_tv_template,
915 test_cipher ("serpent", MODE_ECB, DECRYPT, serpent_dec_tv_template, SERPENT_DEC_TEST_VECTORS); 890 SERPENT_ENC_TEST_VECTORS);
891 test_cipher("ecb(serpent)", DECRYPT, serpent_dec_tv_template,
892 SERPENT_DEC_TEST_VECTORS);
916 893
917 //TNEPRES 894 //TNEPRES
918 test_cipher ("tnepres", MODE_ECB, ENCRYPT, tnepres_enc_tv_template, TNEPRES_ENC_TEST_VECTORS); 895 test_cipher("ecb(tnepres)", ENCRYPT, tnepres_enc_tv_template,
919 test_cipher ("tnepres", MODE_ECB, DECRYPT, tnepres_dec_tv_template, TNEPRES_DEC_TEST_VECTORS); 896 TNEPRES_ENC_TEST_VECTORS);
897 test_cipher("ecb(tnepres)", DECRYPT, tnepres_dec_tv_template,
898 TNEPRES_DEC_TEST_VECTORS);
920 899
921 //AES 900 //AES
922 test_cipher ("aes", MODE_ECB, ENCRYPT, aes_enc_tv_template, AES_ENC_TEST_VECTORS); 901 test_cipher("ecb(aes)", ENCRYPT, aes_enc_tv_template,
923 test_cipher ("aes", MODE_ECB, DECRYPT, aes_dec_tv_template, AES_DEC_TEST_VECTORS); 902 AES_ENC_TEST_VECTORS);
924 test_cipher ("aes", MODE_CBC, ENCRYPT, aes_cbc_enc_tv_template, AES_CBC_ENC_TEST_VECTORS); 903 test_cipher("ecb(aes)", DECRYPT, aes_dec_tv_template,
925 test_cipher ("aes", MODE_CBC, DECRYPT, aes_cbc_dec_tv_template, AES_CBC_DEC_TEST_VECTORS); 904 AES_DEC_TEST_VECTORS);
905 test_cipher("cbc(aes)", ENCRYPT, aes_cbc_enc_tv_template,
906 AES_CBC_ENC_TEST_VECTORS);
907 test_cipher("cbc(aes)", DECRYPT, aes_cbc_dec_tv_template,
908 AES_CBC_DEC_TEST_VECTORS);
926 909
927 //CAST5 910 //CAST5
928 test_cipher ("cast5", MODE_ECB, ENCRYPT, cast5_enc_tv_template, CAST5_ENC_TEST_VECTORS); 911 test_cipher("ecb(cast5)", ENCRYPT, cast5_enc_tv_template,
929 test_cipher ("cast5", MODE_ECB, DECRYPT, cast5_dec_tv_template, CAST5_DEC_TEST_VECTORS); 912 CAST5_ENC_TEST_VECTORS);
913 test_cipher("ecb(cast5)", DECRYPT, cast5_dec_tv_template,
914 CAST5_DEC_TEST_VECTORS);
930 915
931 //CAST6 916 //CAST6
932 test_cipher ("cast6", MODE_ECB, ENCRYPT, cast6_enc_tv_template, CAST6_ENC_TEST_VECTORS); 917 test_cipher("ecb(cast6)", ENCRYPT, cast6_enc_tv_template,
933 test_cipher ("cast6", MODE_ECB, DECRYPT, cast6_dec_tv_template, CAST6_DEC_TEST_VECTORS); 918 CAST6_ENC_TEST_VECTORS);
919 test_cipher("ecb(cast6)", DECRYPT, cast6_dec_tv_template,
920 CAST6_DEC_TEST_VECTORS);
934 921
935 //ARC4 922 //ARC4
936 test_cipher ("arc4", MODE_ECB, ENCRYPT, arc4_enc_tv_template, ARC4_ENC_TEST_VECTORS); 923 test_cipher("ecb(arc4)", ENCRYPT, arc4_enc_tv_template,
937 test_cipher ("arc4", MODE_ECB, DECRYPT, arc4_dec_tv_template, ARC4_DEC_TEST_VECTORS); 924 ARC4_ENC_TEST_VECTORS);
925 test_cipher("ecb(arc4)", DECRYPT, arc4_dec_tv_template,
926 ARC4_DEC_TEST_VECTORS);
938 927
939 //TEA 928 //TEA
940 test_cipher ("tea", MODE_ECB, ENCRYPT, tea_enc_tv_template, TEA_ENC_TEST_VECTORS); 929 test_cipher("ecb(tea)", ENCRYPT, tea_enc_tv_template,
941 test_cipher ("tea", MODE_ECB, DECRYPT, tea_dec_tv_template, TEA_DEC_TEST_VECTORS); 930 TEA_ENC_TEST_VECTORS);
931 test_cipher("ecb(tea)", DECRYPT, tea_dec_tv_template,
932 TEA_DEC_TEST_VECTORS);
942 933
943 934
944 //XTEA 935 //XTEA
945 test_cipher ("xtea", MODE_ECB, ENCRYPT, xtea_enc_tv_template, XTEA_ENC_TEST_VECTORS); 936 test_cipher("ecb(xtea)", ENCRYPT, xtea_enc_tv_template,
946 test_cipher ("xtea", MODE_ECB, DECRYPT, xtea_dec_tv_template, XTEA_DEC_TEST_VECTORS); 937 XTEA_ENC_TEST_VECTORS);
938 test_cipher("ecb(xtea)", DECRYPT, xtea_dec_tv_template,
939 XTEA_DEC_TEST_VECTORS);
947 940
948 //KHAZAD 941 //KHAZAD
949 test_cipher ("khazad", MODE_ECB, ENCRYPT, khazad_enc_tv_template, KHAZAD_ENC_TEST_VECTORS); 942 test_cipher("ecb(khazad)", ENCRYPT, khazad_enc_tv_template,
950 test_cipher ("khazad", MODE_ECB, DECRYPT, khazad_dec_tv_template, KHAZAD_DEC_TEST_VECTORS); 943 KHAZAD_ENC_TEST_VECTORS);
944 test_cipher("ecb(khazad)", DECRYPT, khazad_dec_tv_template,
945 KHAZAD_DEC_TEST_VECTORS);
951 946
952 //ANUBIS 947 //ANUBIS
953 test_cipher ("anubis", MODE_ECB, ENCRYPT, anubis_enc_tv_template, ANUBIS_ENC_TEST_VECTORS); 948 test_cipher("ecb(anubis)", ENCRYPT, anubis_enc_tv_template,
954 test_cipher ("anubis", MODE_ECB, DECRYPT, anubis_dec_tv_template, ANUBIS_DEC_TEST_VECTORS); 949 ANUBIS_ENC_TEST_VECTORS);
955 test_cipher ("anubis", MODE_CBC, ENCRYPT, anubis_cbc_enc_tv_template, ANUBIS_CBC_ENC_TEST_VECTORS); 950 test_cipher("ecb(anubis)", DECRYPT, anubis_dec_tv_template,
956 test_cipher ("anubis", MODE_CBC, DECRYPT, anubis_cbc_dec_tv_template, ANUBIS_CBC_ENC_TEST_VECTORS); 951 ANUBIS_DEC_TEST_VECTORS);
952 test_cipher("cbc(anubis)", ENCRYPT, anubis_cbc_enc_tv_template,
953 ANUBIS_CBC_ENC_TEST_VECTORS);
954 test_cipher("cbc(anubis)", DECRYPT, anubis_cbc_dec_tv_template,
955 ANUBIS_CBC_ENC_TEST_VECTORS);
957 956
958 //XETA 957 //XETA
959 test_cipher ("xeta", MODE_ECB, ENCRYPT, xeta_enc_tv_template, XETA_ENC_TEST_VECTORS); 958 test_cipher("ecb(xeta)", ENCRYPT, xeta_enc_tv_template,
960 test_cipher ("xeta", MODE_ECB, DECRYPT, xeta_dec_tv_template, XETA_DEC_TEST_VECTORS); 959 XETA_ENC_TEST_VECTORS);
960 test_cipher("ecb(xeta)", DECRYPT, xeta_dec_tv_template,
961 XETA_DEC_TEST_VECTORS);
961 962
962 test_hash("sha384", sha384_tv_template, SHA384_TEST_VECTORS); 963 test_hash("sha384", sha384_tv_template, SHA384_TEST_VECTORS);
963 test_hash("sha512", sha512_tv_template, SHA512_TEST_VECTORS); 964 test_hash("sha512", sha512_tv_template, SHA512_TEST_VECTORS);
@@ -968,12 +969,13 @@ static void do_test(void)
968 test_hash("tgr160", tgr160_tv_template, TGR160_TEST_VECTORS); 969 test_hash("tgr160", tgr160_tv_template, TGR160_TEST_VECTORS);
969 test_hash("tgr128", tgr128_tv_template, TGR128_TEST_VECTORS); 970 test_hash("tgr128", tgr128_tv_template, TGR128_TEST_VECTORS);
970 test_deflate(); 971 test_deflate();
971 test_crc32c(); 972 test_hash("crc32c", crc32c_tv_template, CRC32C_TEST_VECTORS);
972#ifdef CONFIG_CRYPTO_HMAC 973 test_hash("hmac(md5)", hmac_md5_tv_template,
973 test_hmac("md5", hmac_md5_tv_template, HMAC_MD5_TEST_VECTORS); 974 HMAC_MD5_TEST_VECTORS);
974 test_hmac("sha1", hmac_sha1_tv_template, HMAC_SHA1_TEST_VECTORS); 975 test_hash("hmac(sha1)", hmac_sha1_tv_template,
975 test_hmac("sha256", hmac_sha256_tv_template, HMAC_SHA256_TEST_VECTORS); 976 HMAC_SHA1_TEST_VECTORS);
976#endif 977 test_hash("hmac(sha256)", hmac_sha256_tv_template,
978 HMAC_SHA256_TEST_VECTORS);
977 979
978 test_hash("michael_mic", michael_mic_tv_template, MICHAEL_MIC_TEST_VECTORS); 980 test_hash("michael_mic", michael_mic_tv_template, MICHAEL_MIC_TEST_VECTORS);
979 break; 981 break;
@@ -987,15 +989,21 @@ static void do_test(void)
987 break; 989 break;
988 990
989 case 3: 991 case 3:
990 test_cipher ("des", MODE_ECB, ENCRYPT, des_enc_tv_template, DES_ENC_TEST_VECTORS); 992 test_cipher("ecb(des)", ENCRYPT, des_enc_tv_template,
991 test_cipher ("des", MODE_ECB, DECRYPT, des_dec_tv_template, DES_DEC_TEST_VECTORS); 993 DES_ENC_TEST_VECTORS);
992 test_cipher ("des", MODE_CBC, ENCRYPT, des_cbc_enc_tv_template, DES_CBC_ENC_TEST_VECTORS); 994 test_cipher("ecb(des)", DECRYPT, des_dec_tv_template,
993 test_cipher ("des", MODE_CBC, DECRYPT, des_cbc_dec_tv_template, DES_CBC_DEC_TEST_VECTORS); 995 DES_DEC_TEST_VECTORS);
996 test_cipher("cbc(des)", ENCRYPT, des_cbc_enc_tv_template,
997 DES_CBC_ENC_TEST_VECTORS);
998 test_cipher("cbc(des)", DECRYPT, des_cbc_dec_tv_template,
999 DES_CBC_DEC_TEST_VECTORS);
994 break; 1000 break;
995 1001
996 case 4: 1002 case 4:
997 test_cipher ("des3_ede", MODE_ECB, ENCRYPT, des3_ede_enc_tv_template, DES3_EDE_ENC_TEST_VECTORS); 1003 test_cipher("ecb(des3_ede)", ENCRYPT, des3_ede_enc_tv_template,
998 test_cipher ("des3_ede", MODE_ECB, DECRYPT, des3_ede_dec_tv_template, DES3_EDE_DEC_TEST_VECTORS); 1004 DES3_EDE_ENC_TEST_VECTORS);
1005 test_cipher("ecb(des3_ede)", DECRYPT, des3_ede_dec_tv_template,
1006 DES3_EDE_DEC_TEST_VECTORS);
999 break; 1007 break;
1000 1008
1001 case 5: 1009 case 5:
@@ -1007,29 +1015,43 @@ static void do_test(void)
1007 break; 1015 break;
1008 1016
1009 case 7: 1017 case 7:
1010 test_cipher ("blowfish", MODE_ECB, ENCRYPT, bf_enc_tv_template, BF_ENC_TEST_VECTORS); 1018 test_cipher("ecb(blowfish)", ENCRYPT, bf_enc_tv_template,
1011 test_cipher ("blowfish", MODE_ECB, DECRYPT, bf_dec_tv_template, BF_DEC_TEST_VECTORS); 1019 BF_ENC_TEST_VECTORS);
1012 test_cipher ("blowfish", MODE_CBC, ENCRYPT, bf_cbc_enc_tv_template, BF_CBC_ENC_TEST_VECTORS); 1020 test_cipher("ecb(blowfish)", DECRYPT, bf_dec_tv_template,
1013 test_cipher ("blowfish", MODE_CBC, DECRYPT, bf_cbc_dec_tv_template, BF_CBC_DEC_TEST_VECTORS); 1021 BF_DEC_TEST_VECTORS);
1022 test_cipher("cbc(blowfish)", ENCRYPT, bf_cbc_enc_tv_template,
1023 BF_CBC_ENC_TEST_VECTORS);
1024 test_cipher("cbc(blowfish)", DECRYPT, bf_cbc_dec_tv_template,
1025 BF_CBC_DEC_TEST_VECTORS);
1014 break; 1026 break;
1015 1027
1016 case 8: 1028 case 8:
1017 test_cipher ("twofish", MODE_ECB, ENCRYPT, tf_enc_tv_template, TF_ENC_TEST_VECTORS); 1029 test_cipher("ecb(twofish)", ENCRYPT, tf_enc_tv_template,
1018 test_cipher ("twofish", MODE_ECB, DECRYPT, tf_dec_tv_template, TF_DEC_TEST_VECTORS); 1030 TF_ENC_TEST_VECTORS);
1019 test_cipher ("twofish", MODE_CBC, ENCRYPT, tf_cbc_enc_tv_template, TF_CBC_ENC_TEST_VECTORS); 1031 test_cipher("ecb(twofish)", DECRYPT, tf_dec_tv_template,
1020 test_cipher ("twofish", MODE_CBC, DECRYPT, tf_cbc_dec_tv_template, TF_CBC_DEC_TEST_VECTORS); 1032 TF_DEC_TEST_VECTORS);
1033 test_cipher("cbc(twofish)", ENCRYPT, tf_cbc_enc_tv_template,
1034 TF_CBC_ENC_TEST_VECTORS);
1035 test_cipher("cbc(twofish)", DECRYPT, tf_cbc_dec_tv_template,
1036 TF_CBC_DEC_TEST_VECTORS);
1021 break; 1037 break;
1022 1038
1023 case 9: 1039 case 9:
1024 test_cipher ("serpent", MODE_ECB, ENCRYPT, serpent_enc_tv_template, SERPENT_ENC_TEST_VECTORS); 1040 test_cipher("ecb(serpent)", ENCRYPT, serpent_enc_tv_template,
1025 test_cipher ("serpent", MODE_ECB, DECRYPT, serpent_dec_tv_template, SERPENT_DEC_TEST_VECTORS); 1041 SERPENT_ENC_TEST_VECTORS);
1042 test_cipher("ecb(serpent)", DECRYPT, serpent_dec_tv_template,
1043 SERPENT_DEC_TEST_VECTORS);
1026 break; 1044 break;
1027 1045
1028 case 10: 1046 case 10:
1029 test_cipher ("aes", MODE_ECB, ENCRYPT, aes_enc_tv_template, AES_ENC_TEST_VECTORS); 1047 test_cipher("ecb(aes)", ENCRYPT, aes_enc_tv_template,
1030 test_cipher ("aes", MODE_ECB, DECRYPT, aes_dec_tv_template, AES_DEC_TEST_VECTORS); 1048 AES_ENC_TEST_VECTORS);
1031 test_cipher ("aes", MODE_CBC, ENCRYPT, aes_cbc_enc_tv_template, AES_CBC_ENC_TEST_VECTORS); 1049 test_cipher("ecb(aes)", DECRYPT, aes_dec_tv_template,
1032 test_cipher ("aes", MODE_CBC, DECRYPT, aes_cbc_dec_tv_template, AES_CBC_DEC_TEST_VECTORS); 1050 AES_DEC_TEST_VECTORS);
1051 test_cipher("cbc(aes)", ENCRYPT, aes_cbc_enc_tv_template,
1052 AES_CBC_ENC_TEST_VECTORS);
1053 test_cipher("cbc(aes)", DECRYPT, aes_cbc_dec_tv_template,
1054 AES_CBC_DEC_TEST_VECTORS);
1033 break; 1055 break;
1034 1056
1035 case 11: 1057 case 11:
@@ -1045,18 +1067,24 @@ static void do_test(void)
1045 break; 1067 break;
1046 1068
1047 case 14: 1069 case 14:
1048 test_cipher ("cast5", MODE_ECB, ENCRYPT, cast5_enc_tv_template, CAST5_ENC_TEST_VECTORS); 1070 test_cipher("ecb(cast5)", ENCRYPT, cast5_enc_tv_template,
1049 test_cipher ("cast5", MODE_ECB, DECRYPT, cast5_dec_tv_template, CAST5_DEC_TEST_VECTORS); 1071 CAST5_ENC_TEST_VECTORS);
1072 test_cipher("ecb(cast5)", DECRYPT, cast5_dec_tv_template,
1073 CAST5_DEC_TEST_VECTORS);
1050 break; 1074 break;
1051 1075
1052 case 15: 1076 case 15:
1053 test_cipher ("cast6", MODE_ECB, ENCRYPT, cast6_enc_tv_template, CAST6_ENC_TEST_VECTORS); 1077 test_cipher("ecb(cast6)", ENCRYPT, cast6_enc_tv_template,
1054 test_cipher ("cast6", MODE_ECB, DECRYPT, cast6_dec_tv_template, CAST6_DEC_TEST_VECTORS); 1078 CAST6_ENC_TEST_VECTORS);
1079 test_cipher("ecb(cast6)", DECRYPT, cast6_dec_tv_template,
1080 CAST6_DEC_TEST_VECTORS);
1055 break; 1081 break;
1056 1082
1057 case 16: 1083 case 16:
1058 test_cipher ("arc4", MODE_ECB, ENCRYPT, arc4_enc_tv_template, ARC4_ENC_TEST_VECTORS); 1084 test_cipher("ecb(arc4)", ENCRYPT, arc4_enc_tv_template,
1059 test_cipher ("arc4", MODE_ECB, DECRYPT, arc4_dec_tv_template, ARC4_DEC_TEST_VECTORS); 1085 ARC4_ENC_TEST_VECTORS);
1086 test_cipher("ecb(arc4)", DECRYPT, arc4_dec_tv_template,
1087 ARC4_DEC_TEST_VECTORS);
1060 break; 1088 break;
1061 1089
1062 case 17: 1090 case 17:
@@ -1064,22 +1092,28 @@ static void do_test(void)
1064 break; 1092 break;
1065 1093
1066 case 18: 1094 case 18:
1067 test_crc32c(); 1095 test_hash("crc32c", crc32c_tv_template, CRC32C_TEST_VECTORS);
1068 break; 1096 break;
1069 1097
1070 case 19: 1098 case 19:
1071 test_cipher ("tea", MODE_ECB, ENCRYPT, tea_enc_tv_template, TEA_ENC_TEST_VECTORS); 1099 test_cipher("ecb(tea)", ENCRYPT, tea_enc_tv_template,
1072 test_cipher ("tea", MODE_ECB, DECRYPT, tea_dec_tv_template, TEA_DEC_TEST_VECTORS); 1100 TEA_ENC_TEST_VECTORS);
1101 test_cipher("ecb(tea)", DECRYPT, tea_dec_tv_template,
1102 TEA_DEC_TEST_VECTORS);
1073 break; 1103 break;
1074 1104
1075 case 20: 1105 case 20:
1076 test_cipher ("xtea", MODE_ECB, ENCRYPT, xtea_enc_tv_template, XTEA_ENC_TEST_VECTORS); 1106 test_cipher("ecb(xtea)", ENCRYPT, xtea_enc_tv_template,
1077 test_cipher ("xtea", MODE_ECB, DECRYPT, xtea_dec_tv_template, XTEA_DEC_TEST_VECTORS); 1107 XTEA_ENC_TEST_VECTORS);
1108 test_cipher("ecb(xtea)", DECRYPT, xtea_dec_tv_template,
1109 XTEA_DEC_TEST_VECTORS);
1078 break; 1110 break;
1079 1111
1080 case 21: 1112 case 21:
1081 test_cipher ("khazad", MODE_ECB, ENCRYPT, khazad_enc_tv_template, KHAZAD_ENC_TEST_VECTORS); 1113 test_cipher("ecb(khazad)", ENCRYPT, khazad_enc_tv_template,
1082 test_cipher ("khazad", MODE_ECB, DECRYPT, khazad_dec_tv_template, KHAZAD_DEC_TEST_VECTORS); 1114 KHAZAD_ENC_TEST_VECTORS);
1115 test_cipher("ecb(khazad)", DECRYPT, khazad_dec_tv_template,
1116 KHAZAD_DEC_TEST_VECTORS);
1083 break; 1117 break;
1084 1118
1085 case 22: 1119 case 22:
@@ -1095,15 +1129,21 @@ static void do_test(void)
1095 break; 1129 break;
1096 1130
1097 case 25: 1131 case 25:
1098 test_cipher ("tnepres", MODE_ECB, ENCRYPT, tnepres_enc_tv_template, TNEPRES_ENC_TEST_VECTORS); 1132 test_cipher("ecb(tnepres)", ENCRYPT, tnepres_enc_tv_template,
1099 test_cipher ("tnepres", MODE_ECB, DECRYPT, tnepres_dec_tv_template, TNEPRES_DEC_TEST_VECTORS); 1133 TNEPRES_ENC_TEST_VECTORS);
1134 test_cipher("ecb(tnepres)", DECRYPT, tnepres_dec_tv_template,
1135 TNEPRES_DEC_TEST_VECTORS);
1100 break; 1136 break;
1101 1137
1102 case 26: 1138 case 26:
1103 test_cipher ("anubis", MODE_ECB, ENCRYPT, anubis_enc_tv_template, ANUBIS_ENC_TEST_VECTORS); 1139 test_cipher("ecb(anubis)", ENCRYPT, anubis_enc_tv_template,
1104 test_cipher ("anubis", MODE_ECB, DECRYPT, anubis_dec_tv_template, ANUBIS_DEC_TEST_VECTORS); 1140 ANUBIS_ENC_TEST_VECTORS);
1105 test_cipher ("anubis", MODE_CBC, ENCRYPT, anubis_cbc_enc_tv_template, ANUBIS_CBC_ENC_TEST_VECTORS); 1141 test_cipher("ecb(anubis)", DECRYPT, anubis_dec_tv_template,
1106 test_cipher ("anubis", MODE_CBC, DECRYPT, anubis_cbc_dec_tv_template, ANUBIS_CBC_ENC_TEST_VECTORS); 1142 ANUBIS_DEC_TEST_VECTORS);
1143 test_cipher("cbc(anubis)", ENCRYPT, anubis_cbc_enc_tv_template,
1144 ANUBIS_CBC_ENC_TEST_VECTORS);
1145 test_cipher("cbc(anubis)", DECRYPT, anubis_cbc_dec_tv_template,
1146 ANUBIS_CBC_ENC_TEST_VECTORS);
1107 break; 1147 break;
1108 1148
1109 case 27: 1149 case 27:
@@ -1120,85 +1160,88 @@ static void do_test(void)
1120 break; 1160 break;
1121 1161
1122 case 30: 1162 case 30:
1123 test_cipher ("xeta", MODE_ECB, ENCRYPT, xeta_enc_tv_template, XETA_ENC_TEST_VECTORS); 1163 test_cipher("ecb(xeta)", ENCRYPT, xeta_enc_tv_template,
1124 test_cipher ("xeta", MODE_ECB, DECRYPT, xeta_dec_tv_template, XETA_DEC_TEST_VECTORS); 1164 XETA_ENC_TEST_VECTORS);
1165 test_cipher("ecb(xeta)", DECRYPT, xeta_dec_tv_template,
1166 XETA_DEC_TEST_VECTORS);
1125 break; 1167 break;
1126 1168
1127#ifdef CONFIG_CRYPTO_HMAC
1128 case 100: 1169 case 100:
1129 test_hmac("md5", hmac_md5_tv_template, HMAC_MD5_TEST_VECTORS); 1170 test_hash("hmac(md5)", hmac_md5_tv_template,
1171 HMAC_MD5_TEST_VECTORS);
1130 break; 1172 break;
1131 1173
1132 case 101: 1174 case 101:
1133 test_hmac("sha1", hmac_sha1_tv_template, HMAC_SHA1_TEST_VECTORS); 1175 test_hash("hmac(sha1)", hmac_sha1_tv_template,
1176 HMAC_SHA1_TEST_VECTORS);
1134 break; 1177 break;
1135 1178
1136 case 102: 1179 case 102:
1137 test_hmac("sha256", hmac_sha256_tv_template, HMAC_SHA256_TEST_VECTORS); 1180 test_hash("hmac(sha256)", hmac_sha256_tv_template,
1181 HMAC_SHA256_TEST_VECTORS);
1138 break; 1182 break;
1139 1183
1140#endif
1141 1184
1142 case 200: 1185 case 200:
1143 test_cipher_speed("aes", MODE_ECB, ENCRYPT, sec, NULL, 0, 1186 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
1144 aes_speed_template); 1187 aes_speed_template);
1145 test_cipher_speed("aes", MODE_ECB, DECRYPT, sec, NULL, 0, 1188 test_cipher_speed("ecb(aes)", DECRYPT, sec, NULL, 0,
1146 aes_speed_template); 1189 aes_speed_template);
1147 test_cipher_speed("aes", MODE_CBC, ENCRYPT, sec, NULL, 0, 1190 test_cipher_speed("cbc(aes)", ENCRYPT, sec, NULL, 0,
1148 aes_speed_template); 1191 aes_speed_template);
1149 test_cipher_speed("aes", MODE_CBC, DECRYPT, sec, NULL, 0, 1192 test_cipher_speed("cbc(aes)", DECRYPT, sec, NULL, 0,
1150 aes_speed_template); 1193 aes_speed_template);
1151 break; 1194 break;
1152 1195
1153 case 201: 1196 case 201:
1154 test_cipher_speed("des3_ede", MODE_ECB, ENCRYPT, sec, 1197 test_cipher_speed("ecb(des3_ede)", ENCRYPT, sec,
1155 des3_ede_enc_tv_template, 1198 des3_ede_enc_tv_template,
1156 DES3_EDE_ENC_TEST_VECTORS, 1199 DES3_EDE_ENC_TEST_VECTORS,
1157 des3_ede_speed_template); 1200 des3_ede_speed_template);
1158 test_cipher_speed("des3_ede", MODE_ECB, DECRYPT, sec, 1201 test_cipher_speed("ecb(des3_ede)", DECRYPT, sec,
1159 des3_ede_dec_tv_template, 1202 des3_ede_dec_tv_template,
1160 DES3_EDE_DEC_TEST_VECTORS, 1203 DES3_EDE_DEC_TEST_VECTORS,
1161 des3_ede_speed_template); 1204 des3_ede_speed_template);
1162 test_cipher_speed("des3_ede", MODE_CBC, ENCRYPT, sec, 1205 test_cipher_speed("cbc(des3_ede)", ENCRYPT, sec,
1163 des3_ede_enc_tv_template, 1206 des3_ede_enc_tv_template,
1164 DES3_EDE_ENC_TEST_VECTORS, 1207 DES3_EDE_ENC_TEST_VECTORS,
1165 des3_ede_speed_template); 1208 des3_ede_speed_template);
1166 test_cipher_speed("des3_ede", MODE_CBC, DECRYPT, sec, 1209 test_cipher_speed("cbc(des3_ede)", DECRYPT, sec,
1167 des3_ede_dec_tv_template, 1210 des3_ede_dec_tv_template,
1168 DES3_EDE_DEC_TEST_VECTORS, 1211 DES3_EDE_DEC_TEST_VECTORS,
1169 des3_ede_speed_template); 1212 des3_ede_speed_template);
1170 break; 1213 break;
1171 1214
1172 case 202: 1215 case 202:
1173 test_cipher_speed("twofish", MODE_ECB, ENCRYPT, sec, NULL, 0, 1216 test_cipher_speed("ecb(twofish)", ENCRYPT, sec, NULL, 0,
1174 twofish_speed_template); 1217 twofish_speed_template);
1175 test_cipher_speed("twofish", MODE_ECB, DECRYPT, sec, NULL, 0, 1218 test_cipher_speed("ecb(twofish)", DECRYPT, sec, NULL, 0,
1176 twofish_speed_template); 1219 twofish_speed_template);
1177 test_cipher_speed("twofish", MODE_CBC, ENCRYPT, sec, NULL, 0, 1220 test_cipher_speed("cbc(twofish)", ENCRYPT, sec, NULL, 0,
1178 twofish_speed_template); 1221 twofish_speed_template);
1179 test_cipher_speed("twofish", MODE_CBC, DECRYPT, sec, NULL, 0, 1222 test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
1180 twofish_speed_template); 1223 twofish_speed_template);
1181 break; 1224 break;
1182 1225
1183 case 203: 1226 case 203:
1184 test_cipher_speed("blowfish", MODE_ECB, ENCRYPT, sec, NULL, 0, 1227 test_cipher_speed("ecb(blowfish)", ENCRYPT, sec, NULL, 0,
1185 blowfish_speed_template); 1228 blowfish_speed_template);
1186 test_cipher_speed("blowfish", MODE_ECB, DECRYPT, sec, NULL, 0, 1229 test_cipher_speed("ecb(blowfish)", DECRYPT, sec, NULL, 0,
1187 blowfish_speed_template); 1230 blowfish_speed_template);
1188 test_cipher_speed("blowfish", MODE_CBC, ENCRYPT, sec, NULL, 0, 1231 test_cipher_speed("cbc(blowfish)", ENCRYPT, sec, NULL, 0,
1189 blowfish_speed_template); 1232 blowfish_speed_template);
1190 test_cipher_speed("blowfish", MODE_CBC, DECRYPT, sec, NULL, 0, 1233 test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
1191 blowfish_speed_template); 1234 blowfish_speed_template);
1192 break; 1235 break;
1193 1236
1194 case 204: 1237 case 204:
1195 test_cipher_speed("des", MODE_ECB, ENCRYPT, sec, NULL, 0, 1238 test_cipher_speed("ecb(des)", ENCRYPT, sec, NULL, 0,
1196 des_speed_template); 1239 des_speed_template);
1197 test_cipher_speed("des", MODE_ECB, DECRYPT, sec, NULL, 0, 1240 test_cipher_speed("ecb(des)", DECRYPT, sec, NULL, 0,
1198 des_speed_template); 1241 des_speed_template);
1199 test_cipher_speed("des", MODE_CBC, ENCRYPT, sec, NULL, 0, 1242 test_cipher_speed("cbc(des)", ENCRYPT, sec, NULL, 0,
1200 des_speed_template); 1243 des_speed_template);
1201 test_cipher_speed("des", MODE_CBC, DECRYPT, sec, NULL, 0, 1244 test_cipher_speed("cbc(des)", DECRYPT, sec, NULL, 0,
1202 des_speed_template); 1245 des_speed_template);
1203 break; 1246 break;
1204 1247
@@ -1206,51 +1249,51 @@ static void do_test(void)
1206 /* fall through */ 1249 /* fall through */
1207 1250
1208 case 301: 1251 case 301:
1209 test_digest_speed("md4", sec, generic_digest_speed_template); 1252 test_hash_speed("md4", sec, generic_hash_speed_template);
1210 if (mode > 300 && mode < 400) break; 1253 if (mode > 300 && mode < 400) break;
1211 1254
1212 case 302: 1255 case 302:
1213 test_digest_speed("md5", sec, generic_digest_speed_template); 1256 test_hash_speed("md5", sec, generic_hash_speed_template);
1214 if (mode > 300 && mode < 400) break; 1257 if (mode > 300 && mode < 400) break;
1215 1258
1216 case 303: 1259 case 303:
1217 test_digest_speed("sha1", sec, generic_digest_speed_template); 1260 test_hash_speed("sha1", sec, generic_hash_speed_template);
1218 if (mode > 300 && mode < 400) break; 1261 if (mode > 300 && mode < 400) break;
1219 1262
1220 case 304: 1263 case 304:
1221 test_digest_speed("sha256", sec, generic_digest_speed_template); 1264 test_hash_speed("sha256", sec, generic_hash_speed_template);
1222 if (mode > 300 && mode < 400) break; 1265 if (mode > 300 && mode < 400) break;
1223 1266
1224 case 305: 1267 case 305:
1225 test_digest_speed("sha384", sec, generic_digest_speed_template); 1268 test_hash_speed("sha384", sec, generic_hash_speed_template);
1226 if (mode > 300 && mode < 400) break; 1269 if (mode > 300 && mode < 400) break;
1227 1270
1228 case 306: 1271 case 306:
1229 test_digest_speed("sha512", sec, generic_digest_speed_template); 1272 test_hash_speed("sha512", sec, generic_hash_speed_template);
1230 if (mode > 300 && mode < 400) break; 1273 if (mode > 300 && mode < 400) break;
1231 1274
1232 case 307: 1275 case 307:
1233 test_digest_speed("wp256", sec, generic_digest_speed_template); 1276 test_hash_speed("wp256", sec, generic_hash_speed_template);
1234 if (mode > 300 && mode < 400) break; 1277 if (mode > 300 && mode < 400) break;
1235 1278
1236 case 308: 1279 case 308:
1237 test_digest_speed("wp384", sec, generic_digest_speed_template); 1280 test_hash_speed("wp384", sec, generic_hash_speed_template);
1238 if (mode > 300 && mode < 400) break; 1281 if (mode > 300 && mode < 400) break;
1239 1282
1240 case 309: 1283 case 309:
1241 test_digest_speed("wp512", sec, generic_digest_speed_template); 1284 test_hash_speed("wp512", sec, generic_hash_speed_template);
1242 if (mode > 300 && mode < 400) break; 1285 if (mode > 300 && mode < 400) break;
1243 1286
1244 case 310: 1287 case 310:
1245 test_digest_speed("tgr128", sec, generic_digest_speed_template); 1288 test_hash_speed("tgr128", sec, generic_hash_speed_template);
1246 if (mode > 300 && mode < 400) break; 1289 if (mode > 300 && mode < 400) break;
1247 1290
1248 case 311: 1291 case 311:
1249 test_digest_speed("tgr160", sec, generic_digest_speed_template); 1292 test_hash_speed("tgr160", sec, generic_hash_speed_template);
1250 if (mode > 300 && mode < 400) break; 1293 if (mode > 300 && mode < 400) break;
1251 1294
1252 case 312: 1295 case 312:
1253 test_digest_speed("tgr192", sec, generic_digest_speed_template); 1296 test_hash_speed("tgr192", sec, generic_hash_speed_template);
1254 if (mode > 300 && mode < 400) break; 1297 if (mode > 300 && mode < 400) break;
1255 1298
1256 case 399: 1299 case 399:
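The tcrypt.c changes above replace every (algorithm, MODE_ECB/MODE_CBC) pair with a single name of the form "mode(cipher)", so test_cipher(), test_cipher_speed() and test_hash() now take full template specifications such as "ecb(aes)", "cbc(des3_ede)" or "hmac(sha1)". A minimal user-space sketch of how such a name can be composed; make_template_name() is a hypothetical helper for illustration, not part of the patch:

#include <stdio.h>

/* Hypothetical helper: build a "mode(cipher)" template name of the kind
 * the reworked test_cipher() calls above take, e.g. "cbc(aes)". */
static void make_template_name(char *buf, size_t len,
			       const char *mode, const char *cipher)
{
	snprintf(buf, len, "%s(%s)", mode, cipher);
}

int main(void)
{
	char name[32];

	make_template_name(name, sizeof(name), "cbc", "aes");
	printf("%s\n", name);	/* prints: cbc(aes) */
	return 0;
}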
diff --git a/crypto/tcrypt.h b/crypto/tcrypt.h
index 1fac5602f633..a40c4411729e 100644
--- a/crypto/tcrypt.h
+++ b/crypto/tcrypt.h
@@ -28,7 +28,7 @@
28struct hash_testvec { 28struct hash_testvec {
29 /* only used with keyed hash algorithms */ 29 /* only used with keyed hash algorithms */
30 char key[128] __attribute__ ((__aligned__(4))); 30 char key[128] __attribute__ ((__aligned__(4)));
31 char plaintext[128]; 31 char plaintext[240];
32 char digest[MAX_DIGEST_SIZE]; 32 char digest[MAX_DIGEST_SIZE];
33 unsigned char tap[MAX_TAP]; 33 unsigned char tap[MAX_TAP];
34 unsigned char psize; 34 unsigned char psize;
@@ -36,16 +36,6 @@ struct hash_testvec {
36 unsigned char ksize; 36 unsigned char ksize;
37}; 37};
38 38
39struct hmac_testvec {
40 char key[128];
41 char plaintext[128];
42 char digest[MAX_DIGEST_SIZE];
43 unsigned char tap[MAX_TAP];
44 unsigned char ksize;
45 unsigned char psize;
46 unsigned char np;
47};
48
49struct cipher_testvec { 39struct cipher_testvec {
50 char key[MAX_KEYLEN] __attribute__ ((__aligned__(4))); 40 char key[MAX_KEYLEN] __attribute__ ((__aligned__(4)));
51 char iv[MAX_IVLEN]; 41 char iv[MAX_IVLEN];
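With struct hmac_testvec removed, the HMAC vectors further down reuse struct hash_testvec, whose key/ksize fields already cover keyed hashes. A minimal sketch of such an entry (the key and message mirror the first RFC2202 case visible below; the digest bytes are deliberately left as a placeholder rather than quoted):

static struct hash_testvec example_hmac_md5_tv[] = {
	{
		.key	   = { [0 ... 15] = 0x0b },	/* 16 key bytes */
		.ksize	   = 16,
		.plaintext = "Hi There",
		.psize	   = 8,
		.digest	   = { 0 },	/* expected HMAC digest goes here */
	},
};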
@@ -65,7 +55,7 @@ struct cipher_speed {
65 unsigned int blen; 55 unsigned int blen;
66}; 56};
67 57
68struct digest_speed { 58struct hash_speed {
69 unsigned int blen; /* buffer length */ 59 unsigned int blen; /* buffer length */
70 unsigned int plen; /* per-update length */ 60 unsigned int plen; /* per-update length */
71}; 61};
@@ -697,14 +687,13 @@ static struct hash_testvec tgr128_tv_template[] = {
697 }, 687 },
698}; 688};
699 689
700#ifdef CONFIG_CRYPTO_HMAC
701/* 690/*
702 * HMAC-MD5 test vectors from RFC2202 691 * HMAC-MD5 test vectors from RFC2202
703 * (These need to be fixed to not use strlen). 692 * (These need to be fixed to not use strlen).
704 */ 693 */
705#define HMAC_MD5_TEST_VECTORS 7 694#define HMAC_MD5_TEST_VECTORS 7
706 695
707static struct hmac_testvec hmac_md5_tv_template[] = 696static struct hash_testvec hmac_md5_tv_template[] =
708{ 697{
709 { 698 {
710 .key = { [0 ... 15] = 0x0b }, 699 .key = { [0 ... 15] = 0x0b },
@@ -768,7 +757,7 @@ static struct hmac_testvec hmac_md5_tv_template[] =
768 */ 757 */
769#define HMAC_SHA1_TEST_VECTORS 7 758#define HMAC_SHA1_TEST_VECTORS 7
770 759
771static struct hmac_testvec hmac_sha1_tv_template[] = { 760static struct hash_testvec hmac_sha1_tv_template[] = {
772 { 761 {
773 .key = { [0 ... 19] = 0x0b }, 762 .key = { [0 ... 19] = 0x0b },
774 .ksize = 20, 763 .ksize = 20,
@@ -833,7 +822,7 @@ static struct hmac_testvec hmac_sha1_tv_template[] = {
833 */ 822 */
834#define HMAC_SHA256_TEST_VECTORS 10 823#define HMAC_SHA256_TEST_VECTORS 10
835 824
836static struct hmac_testvec hmac_sha256_tv_template[] = { 825static struct hash_testvec hmac_sha256_tv_template[] = {
837 { 826 {
838 .key = { 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 827 .key = { 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
839 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 828 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10,
@@ -944,8 +933,6 @@ static struct hmac_testvec hmac_sha256_tv_template[] = {
944 }, 933 },
945}; 934};
946 935
947#endif /* CONFIG_CRYPTO_HMAC */
948
949/* 936/*
950 * DES test vectors. 937 * DES test vectors.
951 */ 938 */
@@ -2897,6 +2884,183 @@ static struct hash_testvec michael_mic_tv_template[] = {
2897}; 2884};
2898 2885
2899/* 2886/*
2887 * CRC32C test vectors
2888 */
2889#define CRC32C_TEST_VECTORS 14
2890
2891static struct hash_testvec crc32c_tv_template[] = {
2892 {
2893 .psize = 0,
2894 .digest = { 0x00, 0x00, 0x00, 0x00 }
2895 },
2896 {
2897 .key = { 0x87, 0xa9, 0xcb, 0xed },
2898 .ksize = 4,
2899 .psize = 0,
2900 .digest = { 0x78, 0x56, 0x34, 0x12 },
2901 },
2902 {
2903 .key = { 0xff, 0xff, 0xff, 0xff },
2904 .ksize = 4,
2905 .plaintext = { 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
2906 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10,
2907 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18,
2908 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20,
2909 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28 },
2910 .psize = 40,
2911 .digest = { 0x7f, 0x15, 0x2c, 0x0e }
2912 },
2913 {
2914 .key = { 0xff, 0xff, 0xff, 0xff },
2915 .ksize = 4,
2916 .plaintext = { 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30,
2917 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38,
2918 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, 0x40,
2919 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
2920 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f, 0x50 },
2921 .psize = 40,
2922 .digest = { 0xf6, 0xeb, 0x80, 0xe9 }
2923 },
2924 {
2925 .key = { 0xff, 0xff, 0xff, 0xff },
2926 .ksize = 4,
2927 .plaintext = { 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
2928 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f, 0x60,
2929 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
2930 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f, 0x70,
2931 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78 },
2932 .psize = 40,
2933 .digest = { 0xed, 0xbd, 0x74, 0xde }
2934 },
2935 {
2936 .key = { 0xff, 0xff, 0xff, 0xff },
2937 .ksize = 4,
2938 .plaintext = { 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e, 0x7f, 0x80,
2939 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88,
2940 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f, 0x90,
2941 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
2942 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f, 0xa0 },
2943 .psize = 40,
2944 .digest = { 0x62, 0xc8, 0x79, 0xd5 }
2945 },
2946 {
2947 .key = { 0xff, 0xff, 0xff, 0xff },
2948 .ksize = 4,
2949 .plaintext = { 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7, 0xa8,
2950 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf, 0xb0,
2951 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, 0xb8,
2952 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf, 0xc0,
2953 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7, 0xc8 },
2954 .psize = 40,
2955 .digest = { 0xd0, 0x9a, 0x97, 0xba }
2956 },
2957 {
2958 .key = { 0xff, 0xff, 0xff, 0xff },
2959 .ksize = 4,
2960 .plaintext = { 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf, 0xd0,
2961 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8,
2962 0xd9, 0xda, 0xdb, 0xdc, 0xdd, 0xde, 0xdf, 0xe0,
2963 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8,
2964 0xe9, 0xea, 0xeb, 0xec, 0xed, 0xee, 0xef, 0xf0 },
2965 .psize = 40,
2966 .digest = { 0x13, 0xd9, 0x29, 0x2b }
2967 },
2968 {
2969 .key = { 0x80, 0xea, 0xd3, 0xf1 },
2970 .ksize = 4,
2971 .plaintext = { 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30,
2972 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38,
2973 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, 0x40,
2974 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
2975 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f, 0x50 },
2976 .psize = 40,
2977 .digest = { 0x0c, 0xb5, 0xe2, 0xa2 }
2978 },
2979 {
2980 .key = { 0xf3, 0x4a, 0x1d, 0x5d },
2981 .ksize = 4,
2982 .plaintext = { 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
2983 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f, 0x60,
2984 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
2985 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f, 0x70,
2986 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78 },
2987 .psize = 40,
2988 .digest = { 0xd1, 0x7f, 0xfb, 0xa6 }
2989 },
2990 {
2991 .key = { 0x2e, 0x80, 0x04, 0x59 },
2992 .ksize = 4,
2993 .plaintext = { 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e, 0x7f, 0x80,
2994 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88,
2995 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f, 0x90,
2996 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
2997 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f, 0xa0 },
2998 .psize = 40,
2999 .digest = { 0x59, 0x33, 0xe6, 0x7a }
3000 },
3001 {
3002 .key = { 0xa6, 0xcc, 0x19, 0x85 },
3003 .ksize = 4,
3004 .plaintext = { 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7, 0xa8,
3005 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf, 0xb0,
3006 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, 0xb8,
3007 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf, 0xc0,
3008 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7, 0xc8 },
3009 .psize = 40,
3010 .digest = { 0xbe, 0x03, 0x01, 0xd2 }
3011 },
3012 {
3013 .key = { 0x41, 0xfc, 0xfe, 0x2d },
3014 .ksize = 4,
3015 .plaintext = { 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf, 0xd0,
3016 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8,
3017 0xd9, 0xda, 0xdb, 0xdc, 0xdd, 0xde, 0xdf, 0xe0,
3018 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8,
3019 0xe9, 0xea, 0xeb, 0xec, 0xed, 0xee, 0xef, 0xf0 },
3020 .psize = 40,
3021 .digest = { 0x75, 0xd3, 0xc5, 0x24 }
3022 },
3023 {
3024 .key = { 0xff, 0xff, 0xff, 0xff },
3025 .ksize = 4,
3026 .plaintext = { 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
3027 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10,
3028 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18,
3029 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20,
3030 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28,
3031 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30,
3032 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38,
3033 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, 0x40,
3034 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
3035 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f, 0x50,
3036 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
3037 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f, 0x60,
3038 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
3039 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f, 0x70,
3040 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
3041 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e, 0x7f, 0x80,
3042 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88,
3043 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f, 0x90,
3044 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
3045 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f, 0xa0,
3046 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7, 0xa8,
3047 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf, 0xb0,
3048 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, 0xb8,
3049 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf, 0xc0,
3050 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7, 0xc8,
3051 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf, 0xd0,
3052 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8,
3053 0xd9, 0xda, 0xdb, 0xdc, 0xdd, 0xde, 0xdf, 0xe0,
3054 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8,
3055 0xe9, 0xea, 0xeb, 0xec, 0xed, 0xee, 0xef, 0xf0 },
3056 .psize = 240,
3057 .digest = { 0x75, 0xd3, 0xc5, 0x24 },
3058 .np = 2,
3059 .tap = { 31, 209 }
3060 },
3061};
3062
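The vectors above treat CRC32C as a keyed digest: the optional 4-byte key is the little-endian seed and the 4-byte result is the complemented final state, again little-endian (with an empty message that is simply ~seed, which is how the 87 a9 cb ed key yields 78 56 34 12). A bit-by-bit sketch of that convention, assuming the standard reflected Castagnoli polynomial; it is an illustration of what the vectors encode, not the kernel's table-driven implementation:

#include <stdint.h>
#include <stddef.h>

/* Reflected, bit-by-bit CRC32C; 0x82F63B78 is the bit-reversed form of
 * the Castagnoli polynomial 0x1EDC6F41. */
static uint32_t crc32c_update(uint32_t crc, const uint8_t *data, size_t len)
{
	while (len--) {
		crc ^= *data++;
		for (int i = 0; i < 8; i++)
			crc = (crc >> 1) ^ (0x82F63B78 & -(crc & 1));
	}
	return crc;
}

/* Keyed-digest convention suggested by the vectors: seed from the key
 * (little-endian), complement on final, store little-endian. */
static void crc32c_digest(const uint8_t key[4], const uint8_t *msg,
			  size_t len, uint8_t out[4])
{
	uint32_t crc = (uint32_t)key[0] | (uint32_t)key[1] << 8 |
		       (uint32_t)key[2] << 16 | (uint32_t)key[3] << 24;

	crc = ~crc32c_update(crc, msg, len);
	out[0] = (uint8_t)crc;
	out[1] = (uint8_t)(crc >> 8);
	out[2] = (uint8_t)(crc >> 16);
	out[3] = (uint8_t)(crc >> 24);
}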
3063/*
2900 * Cipher speed tests 3064 * Cipher speed tests
2901 */ 3065 */
2902static struct cipher_speed aes_speed_template[] = { 3066static struct cipher_speed aes_speed_template[] = {
@@ -2983,7 +3147,7 @@ static struct cipher_speed des_speed_template[] = {
2983/* 3147/*
2984 * Digest speed tests 3148 * Digest speed tests
2985 */ 3149 */
2986static struct digest_speed generic_digest_speed_template[] = { 3150static struct hash_speed generic_hash_speed_template[] = {
2987 { .blen = 16, .plen = 16, }, 3151 { .blen = 16, .plen = 16, },
2988 { .blen = 64, .plen = 16, }, 3152 { .blen = 64, .plen = 16, },
2989 { .blen = 64, .plen = 64, }, 3153 { .blen = 64, .plen = 64, },
diff --git a/crypto/tea.c b/crypto/tea.c
index 5367adc82fc9..1c54e26fa529 100644
--- a/crypto/tea.c
+++ b/crypto/tea.c
@@ -46,16 +46,10 @@ struct xtea_ctx {
46}; 46};
47 47
48static int tea_setkey(struct crypto_tfm *tfm, const u8 *in_key, 48static int tea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
49 unsigned int key_len, u32 *flags) 49 unsigned int key_len)
50{ 50{
51 struct tea_ctx *ctx = crypto_tfm_ctx(tfm); 51 struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
52 const __le32 *key = (const __le32 *)in_key; 52 const __le32 *key = (const __le32 *)in_key;
53
54 if (key_len != 16)
55 {
56 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
57 return -EINVAL;
58 }
59 53
60 ctx->KEY[0] = le32_to_cpu(key[0]); 54 ctx->KEY[0] = le32_to_cpu(key[0]);
61 ctx->KEY[1] = le32_to_cpu(key[1]); 55 ctx->KEY[1] = le32_to_cpu(key[1]);
@@ -125,16 +119,10 @@ static void tea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
125} 119}
126 120
127static int xtea_setkey(struct crypto_tfm *tfm, const u8 *in_key, 121static int xtea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
128 unsigned int key_len, u32 *flags) 122 unsigned int key_len)
129{ 123{
130 struct xtea_ctx *ctx = crypto_tfm_ctx(tfm); 124 struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
131 const __le32 *key = (const __le32 *)in_key; 125 const __le32 *key = (const __le32 *)in_key;
132
133 if (key_len != 16)
134 {
135 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
136 return -EINVAL;
137 }
138 126
139 ctx->KEY[0] = le32_to_cpu(key[0]); 127 ctx->KEY[0] = le32_to_cpu(key[0]);
140 ctx->KEY[1] = le32_to_cpu(key[1]); 128 ctx->KEY[1] = le32_to_cpu(key[1]);
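The only functional change to tea.c in this hunk is the setkey signature: the u32 *flags argument is gone and with it the explicit 16-byte length check. As far as the companion API changes (not shown in this hunk) go, the core now rejects key lengths outside the algorithm's declared cia_min_keysize/cia_max_keysize bounds, which are both TEA_KEY_SIZE (16) for tea and xtea, and records CRYPTO_TFM_RES_BAD_KEY_LEN in the tfm's own flags. A simplified sketch of that core-level check, with names abridged and not lifted from crypto/cipher.c:

/* Simplified sketch of the core-level check that makes the removed
 * per-cipher length test redundant; error flags now live on the tfm
 * itself, which is why setkey no longer takes a u32 *flags argument. */
static int setkey_checked(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int keylen)
{
	struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;

	if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	return cia->cia_setkey(tfm, key, keylen);
}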
diff --git a/crypto/twofish.c b/crypto/twofish.c
index ec2488242e2d..4979a2be48a9 100644
--- a/crypto/twofish.c
+++ b/crypto/twofish.c
@@ -39,6 +39,7 @@
39 */ 39 */
40 40
41#include <asm/byteorder.h> 41#include <asm/byteorder.h>
42#include <crypto/twofish.h>
42#include <linux/module.h> 43#include <linux/module.h>
43#include <linux/init.h> 44#include <linux/init.h>
44#include <linux/types.h> 45#include <linux/types.h>
@@ -46,534 +47,6 @@
46#include <linux/crypto.h> 47#include <linux/crypto.h>
47#include <linux/bitops.h> 48#include <linux/bitops.h>
48 49
49
50/* The large precomputed tables for the Twofish cipher (twofish.c)
51 * Taken from the same source as twofish.c
52 * Marc Mutz <Marc@Mutz.com>
53 */
54
55/* These two tables are the q0 and q1 permutations, exactly as described in
56 * the Twofish paper. */
57
58static const u8 q0[256] = {
59 0xA9, 0x67, 0xB3, 0xE8, 0x04, 0xFD, 0xA3, 0x76, 0x9A, 0x92, 0x80, 0x78,
60 0xE4, 0xDD, 0xD1, 0x38, 0x0D, 0xC6, 0x35, 0x98, 0x18, 0xF7, 0xEC, 0x6C,
61 0x43, 0x75, 0x37, 0x26, 0xFA, 0x13, 0x94, 0x48, 0xF2, 0xD0, 0x8B, 0x30,
62 0x84, 0x54, 0xDF, 0x23, 0x19, 0x5B, 0x3D, 0x59, 0xF3, 0xAE, 0xA2, 0x82,
63 0x63, 0x01, 0x83, 0x2E, 0xD9, 0x51, 0x9B, 0x7C, 0xA6, 0xEB, 0xA5, 0xBE,
64 0x16, 0x0C, 0xE3, 0x61, 0xC0, 0x8C, 0x3A, 0xF5, 0x73, 0x2C, 0x25, 0x0B,
65 0xBB, 0x4E, 0x89, 0x6B, 0x53, 0x6A, 0xB4, 0xF1, 0xE1, 0xE6, 0xBD, 0x45,
66 0xE2, 0xF4, 0xB6, 0x66, 0xCC, 0x95, 0x03, 0x56, 0xD4, 0x1C, 0x1E, 0xD7,
67 0xFB, 0xC3, 0x8E, 0xB5, 0xE9, 0xCF, 0xBF, 0xBA, 0xEA, 0x77, 0x39, 0xAF,
68 0x33, 0xC9, 0x62, 0x71, 0x81, 0x79, 0x09, 0xAD, 0x24, 0xCD, 0xF9, 0xD8,
69 0xE5, 0xC5, 0xB9, 0x4D, 0x44, 0x08, 0x86, 0xE7, 0xA1, 0x1D, 0xAA, 0xED,
70 0x06, 0x70, 0xB2, 0xD2, 0x41, 0x7B, 0xA0, 0x11, 0x31, 0xC2, 0x27, 0x90,
71 0x20, 0xF6, 0x60, 0xFF, 0x96, 0x5C, 0xB1, 0xAB, 0x9E, 0x9C, 0x52, 0x1B,
72 0x5F, 0x93, 0x0A, 0xEF, 0x91, 0x85, 0x49, 0xEE, 0x2D, 0x4F, 0x8F, 0x3B,
73 0x47, 0x87, 0x6D, 0x46, 0xD6, 0x3E, 0x69, 0x64, 0x2A, 0xCE, 0xCB, 0x2F,
74 0xFC, 0x97, 0x05, 0x7A, 0xAC, 0x7F, 0xD5, 0x1A, 0x4B, 0x0E, 0xA7, 0x5A,
75 0x28, 0x14, 0x3F, 0x29, 0x88, 0x3C, 0x4C, 0x02, 0xB8, 0xDA, 0xB0, 0x17,
76 0x55, 0x1F, 0x8A, 0x7D, 0x57, 0xC7, 0x8D, 0x74, 0xB7, 0xC4, 0x9F, 0x72,
77 0x7E, 0x15, 0x22, 0x12, 0x58, 0x07, 0x99, 0x34, 0x6E, 0x50, 0xDE, 0x68,
78 0x65, 0xBC, 0xDB, 0xF8, 0xC8, 0xA8, 0x2B, 0x40, 0xDC, 0xFE, 0x32, 0xA4,
79 0xCA, 0x10, 0x21, 0xF0, 0xD3, 0x5D, 0x0F, 0x00, 0x6F, 0x9D, 0x36, 0x42,
80 0x4A, 0x5E, 0xC1, 0xE0
81};
82
83static const u8 q1[256] = {
84 0x75, 0xF3, 0xC6, 0xF4, 0xDB, 0x7B, 0xFB, 0xC8, 0x4A, 0xD3, 0xE6, 0x6B,
85 0x45, 0x7D, 0xE8, 0x4B, 0xD6, 0x32, 0xD8, 0xFD, 0x37, 0x71, 0xF1, 0xE1,
86 0x30, 0x0F, 0xF8, 0x1B, 0x87, 0xFA, 0x06, 0x3F, 0x5E, 0xBA, 0xAE, 0x5B,
87 0x8A, 0x00, 0xBC, 0x9D, 0x6D, 0xC1, 0xB1, 0x0E, 0x80, 0x5D, 0xD2, 0xD5,
88 0xA0, 0x84, 0x07, 0x14, 0xB5, 0x90, 0x2C, 0xA3, 0xB2, 0x73, 0x4C, 0x54,
89 0x92, 0x74, 0x36, 0x51, 0x38, 0xB0, 0xBD, 0x5A, 0xFC, 0x60, 0x62, 0x96,
90 0x6C, 0x42, 0xF7, 0x10, 0x7C, 0x28, 0x27, 0x8C, 0x13, 0x95, 0x9C, 0xC7,
91 0x24, 0x46, 0x3B, 0x70, 0xCA, 0xE3, 0x85, 0xCB, 0x11, 0xD0, 0x93, 0xB8,
92 0xA6, 0x83, 0x20, 0xFF, 0x9F, 0x77, 0xC3, 0xCC, 0x03, 0x6F, 0x08, 0xBF,
93 0x40, 0xE7, 0x2B, 0xE2, 0x79, 0x0C, 0xAA, 0x82, 0x41, 0x3A, 0xEA, 0xB9,
94 0xE4, 0x9A, 0xA4, 0x97, 0x7E, 0xDA, 0x7A, 0x17, 0x66, 0x94, 0xA1, 0x1D,
95 0x3D, 0xF0, 0xDE, 0xB3, 0x0B, 0x72, 0xA7, 0x1C, 0xEF, 0xD1, 0x53, 0x3E,
96 0x8F, 0x33, 0x26, 0x5F, 0xEC, 0x76, 0x2A, 0x49, 0x81, 0x88, 0xEE, 0x21,
97 0xC4, 0x1A, 0xEB, 0xD9, 0xC5, 0x39, 0x99, 0xCD, 0xAD, 0x31, 0x8B, 0x01,
98 0x18, 0x23, 0xDD, 0x1F, 0x4E, 0x2D, 0xF9, 0x48, 0x4F, 0xF2, 0x65, 0x8E,
99 0x78, 0x5C, 0x58, 0x19, 0x8D, 0xE5, 0x98, 0x57, 0x67, 0x7F, 0x05, 0x64,
100 0xAF, 0x63, 0xB6, 0xFE, 0xF5, 0xB7, 0x3C, 0xA5, 0xCE, 0xE9, 0x68, 0x44,
101 0xE0, 0x4D, 0x43, 0x69, 0x29, 0x2E, 0xAC, 0x15, 0x59, 0xA8, 0x0A, 0x9E,
102 0x6E, 0x47, 0xDF, 0x34, 0x35, 0x6A, 0xCF, 0xDC, 0x22, 0xC9, 0xC0, 0x9B,
103 0x89, 0xD4, 0xED, 0xAB, 0x12, 0xA2, 0x0D, 0x52, 0xBB, 0x02, 0x2F, 0xA9,
104 0xD7, 0x61, 0x1E, 0xB4, 0x50, 0x04, 0xF6, 0xC2, 0x16, 0x25, 0x86, 0x56,
105 0x55, 0x09, 0xBE, 0x91
106};
107
108/* These MDS tables are actually tables of MDS composed with q0 and q1,
109 * because it is only ever used that way and we can save some time by
110 * precomputing. Of course the main saving comes from precomputing the
111 * GF(2^8) multiplication involved in the MDS matrix multiply; by looking
112 * things up in these tables we reduce the matrix multiply to four lookups
113 * and three XORs. Semi-formally, the definition of these tables is:
114 * mds[0][i] = MDS (q1[i] 0 0 0)^T mds[1][i] = MDS (0 q0[i] 0 0)^T
115 * mds[2][i] = MDS (0 0 q1[i] 0)^T mds[3][i] = MDS (0 0 0 q0[i])^T
116 * where ^T means "transpose", the matrix multiply is performed in GF(2^8)
117 * represented as GF(2)[x]/v(x) where v(x)=x^8+x^6+x^5+x^3+1 as described
118 * by Schneier et al, and I'm casually glossing over the byte/word
119 * conversion issues. */
120
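As the comment above explains, precomposing MDS with q0/q1 reduces the column mix to four lookups and three XORs: by linearity, MDS applied to the column (q1[a], q0[b], q1[c], q0[d])^T is the XOR of the four single-column products, which is exactly what the mds[][] tables that follow store. A tiny illustration; mds_column_mix is a made-up name, the real code does this inline:

/* Four lookups and three XORs, per the mds[][] definition described
 * in the comment above (the tables themselves follow). */
static u32 mds_column_mix(u8 a, u8 b, u8 c, u8 d)
{
	return mds[0][a] ^ mds[1][b] ^ mds[2][c] ^ mds[3][d];
}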
121static const u32 mds[4][256] = {
122 {0xBCBC3275, 0xECEC21F3, 0x202043C6, 0xB3B3C9F4, 0xDADA03DB, 0x02028B7B,
123 0xE2E22BFB, 0x9E9EFAC8, 0xC9C9EC4A, 0xD4D409D3, 0x18186BE6, 0x1E1E9F6B,
124 0x98980E45, 0xB2B2387D, 0xA6A6D2E8, 0x2626B74B, 0x3C3C57D6, 0x93938A32,
125 0x8282EED8, 0x525298FD, 0x7B7BD437, 0xBBBB3771, 0x5B5B97F1, 0x474783E1,
126 0x24243C30, 0x5151E20F, 0xBABAC6F8, 0x4A4AF31B, 0xBFBF4887, 0x0D0D70FA,
127 0xB0B0B306, 0x7575DE3F, 0xD2D2FD5E, 0x7D7D20BA, 0x666631AE, 0x3A3AA35B,
128 0x59591C8A, 0x00000000, 0xCDCD93BC, 0x1A1AE09D, 0xAEAE2C6D, 0x7F7FABC1,
129 0x2B2BC7B1, 0xBEBEB90E, 0xE0E0A080, 0x8A8A105D, 0x3B3B52D2, 0x6464BAD5,
130 0xD8D888A0, 0xE7E7A584, 0x5F5FE807, 0x1B1B1114, 0x2C2CC2B5, 0xFCFCB490,
131 0x3131272C, 0x808065A3, 0x73732AB2, 0x0C0C8173, 0x79795F4C, 0x6B6B4154,
132 0x4B4B0292, 0x53536974, 0x94948F36, 0x83831F51, 0x2A2A3638, 0xC4C49CB0,
133 0x2222C8BD, 0xD5D5F85A, 0xBDBDC3FC, 0x48487860, 0xFFFFCE62, 0x4C4C0796,
134 0x4141776C, 0xC7C7E642, 0xEBEB24F7, 0x1C1C1410, 0x5D5D637C, 0x36362228,
135 0x6767C027, 0xE9E9AF8C, 0x4444F913, 0x1414EA95, 0xF5F5BB9C, 0xCFCF18C7,
136 0x3F3F2D24, 0xC0C0E346, 0x7272DB3B, 0x54546C70, 0x29294CCA, 0xF0F035E3,
137 0x0808FE85, 0xC6C617CB, 0xF3F34F11, 0x8C8CE4D0, 0xA4A45993, 0xCACA96B8,
138 0x68683BA6, 0xB8B84D83, 0x38382820, 0xE5E52EFF, 0xADAD569F, 0x0B0B8477,
139 0xC8C81DC3, 0x9999FFCC, 0x5858ED03, 0x19199A6F, 0x0E0E0A08, 0x95957EBF,
140 0x70705040, 0xF7F730E7, 0x6E6ECF2B, 0x1F1F6EE2, 0xB5B53D79, 0x09090F0C,
141 0x616134AA, 0x57571682, 0x9F9F0B41, 0x9D9D803A, 0x111164EA, 0x2525CDB9,
142 0xAFAFDDE4, 0x4545089A, 0xDFDF8DA4, 0xA3A35C97, 0xEAEAD57E, 0x353558DA,
143 0xEDEDD07A, 0x4343FC17, 0xF8F8CB66, 0xFBFBB194, 0x3737D3A1, 0xFAFA401D,
144 0xC2C2683D, 0xB4B4CCF0, 0x32325DDE, 0x9C9C71B3, 0x5656E70B, 0xE3E3DA72,
145 0x878760A7, 0x15151B1C, 0xF9F93AEF, 0x6363BFD1, 0x3434A953, 0x9A9A853E,
146 0xB1B1428F, 0x7C7CD133, 0x88889B26, 0x3D3DA65F, 0xA1A1D7EC, 0xE4E4DF76,
147 0x8181942A, 0x91910149, 0x0F0FFB81, 0xEEEEAA88, 0x161661EE, 0xD7D77321,
148 0x9797F5C4, 0xA5A5A81A, 0xFEFE3FEB, 0x6D6DB5D9, 0x7878AEC5, 0xC5C56D39,
149 0x1D1DE599, 0x7676A4CD, 0x3E3EDCAD, 0xCBCB6731, 0xB6B6478B, 0xEFEF5B01,
150 0x12121E18, 0x6060C523, 0x6A6AB0DD, 0x4D4DF61F, 0xCECEE94E, 0xDEDE7C2D,
151 0x55559DF9, 0x7E7E5A48, 0x2121B24F, 0x03037AF2, 0xA0A02665, 0x5E5E198E,
152 0x5A5A6678, 0x65654B5C, 0x62624E58, 0xFDFD4519, 0x0606F48D, 0x404086E5,
153 0xF2F2BE98, 0x3333AC57, 0x17179067, 0x05058E7F, 0xE8E85E05, 0x4F4F7D64,
154 0x89896AAF, 0x10109563, 0x74742FB6, 0x0A0A75FE, 0x5C5C92F5, 0x9B9B74B7,
155 0x2D2D333C, 0x3030D6A5, 0x2E2E49CE, 0x494989E9, 0x46467268, 0x77775544,
156 0xA8A8D8E0, 0x9696044D, 0x2828BD43, 0xA9A92969, 0xD9D97929, 0x8686912E,
157 0xD1D187AC, 0xF4F44A15, 0x8D8D1559, 0xD6D682A8, 0xB9B9BC0A, 0x42420D9E,
158 0xF6F6C16E, 0x2F2FB847, 0xDDDD06DF, 0x23233934, 0xCCCC6235, 0xF1F1C46A,
159 0xC1C112CF, 0x8585EBDC, 0x8F8F9E22, 0x7171A1C9, 0x9090F0C0, 0xAAAA539B,
160 0x0101F189, 0x8B8BE1D4, 0x4E4E8CED, 0x8E8E6FAB, 0xABABA212, 0x6F6F3EA2,
161 0xE6E6540D, 0xDBDBF252, 0x92927BBB, 0xB7B7B602, 0x6969CA2F, 0x3939D9A9,
162 0xD3D30CD7, 0xA7A72361, 0xA2A2AD1E, 0xC3C399B4, 0x6C6C4450, 0x07070504,
163 0x04047FF6, 0x272746C2, 0xACACA716, 0xD0D07625, 0x50501386, 0xDCDCF756,
164 0x84841A55, 0xE1E15109, 0x7A7A25BE, 0x1313EF91},
165
166 {0xA9D93939, 0x67901717, 0xB3719C9C, 0xE8D2A6A6, 0x04050707, 0xFD985252,
167 0xA3658080, 0x76DFE4E4, 0x9A084545, 0x92024B4B, 0x80A0E0E0, 0x78665A5A,
168 0xE4DDAFAF, 0xDDB06A6A, 0xD1BF6363, 0x38362A2A, 0x0D54E6E6, 0xC6432020,
169 0x3562CCCC, 0x98BEF2F2, 0x181E1212, 0xF724EBEB, 0xECD7A1A1, 0x6C774141,
170 0x43BD2828, 0x7532BCBC, 0x37D47B7B, 0x269B8888, 0xFA700D0D, 0x13F94444,
171 0x94B1FBFB, 0x485A7E7E, 0xF27A0303, 0xD0E48C8C, 0x8B47B6B6, 0x303C2424,
172 0x84A5E7E7, 0x54416B6B, 0xDF06DDDD, 0x23C56060, 0x1945FDFD, 0x5BA33A3A,
173 0x3D68C2C2, 0x59158D8D, 0xF321ECEC, 0xAE316666, 0xA23E6F6F, 0x82165757,
174 0x63951010, 0x015BEFEF, 0x834DB8B8, 0x2E918686, 0xD9B56D6D, 0x511F8383,
175 0x9B53AAAA, 0x7C635D5D, 0xA63B6868, 0xEB3FFEFE, 0xA5D63030, 0xBE257A7A,
176 0x16A7ACAC, 0x0C0F0909, 0xE335F0F0, 0x6123A7A7, 0xC0F09090, 0x8CAFE9E9,
177 0x3A809D9D, 0xF5925C5C, 0x73810C0C, 0x2C273131, 0x2576D0D0, 0x0BE75656,
178 0xBB7B9292, 0x4EE9CECE, 0x89F10101, 0x6B9F1E1E, 0x53A93434, 0x6AC4F1F1,
179 0xB499C3C3, 0xF1975B5B, 0xE1834747, 0xE66B1818, 0xBDC82222, 0x450E9898,
180 0xE26E1F1F, 0xF4C9B3B3, 0xB62F7474, 0x66CBF8F8, 0xCCFF9999, 0x95EA1414,
181 0x03ED5858, 0x56F7DCDC, 0xD4E18B8B, 0x1C1B1515, 0x1EADA2A2, 0xD70CD3D3,
182 0xFB2BE2E2, 0xC31DC8C8, 0x8E195E5E, 0xB5C22C2C, 0xE9894949, 0xCF12C1C1,
183 0xBF7E9595, 0xBA207D7D, 0xEA641111, 0x77840B0B, 0x396DC5C5, 0xAF6A8989,
184 0x33D17C7C, 0xC9A17171, 0x62CEFFFF, 0x7137BBBB, 0x81FB0F0F, 0x793DB5B5,
185 0x0951E1E1, 0xADDC3E3E, 0x242D3F3F, 0xCDA47676, 0xF99D5555, 0xD8EE8282,
186 0xE5864040, 0xC5AE7878, 0xB9CD2525, 0x4D049696, 0x44557777, 0x080A0E0E,
187 0x86135050, 0xE730F7F7, 0xA1D33737, 0x1D40FAFA, 0xAA346161, 0xED8C4E4E,
188 0x06B3B0B0, 0x706C5454, 0xB22A7373, 0xD2523B3B, 0x410B9F9F, 0x7B8B0202,
189 0xA088D8D8, 0x114FF3F3, 0x3167CBCB, 0xC2462727, 0x27C06767, 0x90B4FCFC,
190 0x20283838, 0xF67F0404, 0x60784848, 0xFF2EE5E5, 0x96074C4C, 0x5C4B6565,
191 0xB1C72B2B, 0xAB6F8E8E, 0x9E0D4242, 0x9CBBF5F5, 0x52F2DBDB, 0x1BF34A4A,
192 0x5FA63D3D, 0x9359A4A4, 0x0ABCB9B9, 0xEF3AF9F9, 0x91EF1313, 0x85FE0808,
193 0x49019191, 0xEE611616, 0x2D7CDEDE, 0x4FB22121, 0x8F42B1B1, 0x3BDB7272,
194 0x47B82F2F, 0x8748BFBF, 0x6D2CAEAE, 0x46E3C0C0, 0xD6573C3C, 0x3E859A9A,
195 0x6929A9A9, 0x647D4F4F, 0x2A948181, 0xCE492E2E, 0xCB17C6C6, 0x2FCA6969,
196 0xFCC3BDBD, 0x975CA3A3, 0x055EE8E8, 0x7AD0EDED, 0xAC87D1D1, 0x7F8E0505,
197 0xD5BA6464, 0x1AA8A5A5, 0x4BB72626, 0x0EB9BEBE, 0xA7608787, 0x5AF8D5D5,
198 0x28223636, 0x14111B1B, 0x3FDE7575, 0x2979D9D9, 0x88AAEEEE, 0x3C332D2D,
199 0x4C5F7979, 0x02B6B7B7, 0xB896CACA, 0xDA583535, 0xB09CC4C4, 0x17FC4343,
200 0x551A8484, 0x1FF64D4D, 0x8A1C5959, 0x7D38B2B2, 0x57AC3333, 0xC718CFCF,
201 0x8DF40606, 0x74695353, 0xB7749B9B, 0xC4F59797, 0x9F56ADAD, 0x72DAE3E3,
202 0x7ED5EAEA, 0x154AF4F4, 0x229E8F8F, 0x12A2ABAB, 0x584E6262, 0x07E85F5F,
203 0x99E51D1D, 0x34392323, 0x6EC1F6F6, 0x50446C6C, 0xDE5D3232, 0x68724646,
204 0x6526A0A0, 0xBC93CDCD, 0xDB03DADA, 0xF8C6BABA, 0xC8FA9E9E, 0xA882D6D6,
205 0x2BCF6E6E, 0x40507070, 0xDCEB8585, 0xFE750A0A, 0x328A9393, 0xA48DDFDF,
206 0xCA4C2929, 0x10141C1C, 0x2173D7D7, 0xF0CCB4B4, 0xD309D4D4, 0x5D108A8A,
207 0x0FE25151, 0x00000000, 0x6F9A1919, 0x9DE01A1A, 0x368F9494, 0x42E6C7C7,
208 0x4AECC9C9, 0x5EFDD2D2, 0xC1AB7F7F, 0xE0D8A8A8},
209
210 {0xBC75BC32, 0xECF3EC21, 0x20C62043, 0xB3F4B3C9, 0xDADBDA03, 0x027B028B,
211 0xE2FBE22B, 0x9EC89EFA, 0xC94AC9EC, 0xD4D3D409, 0x18E6186B, 0x1E6B1E9F,
212 0x9845980E, 0xB27DB238, 0xA6E8A6D2, 0x264B26B7, 0x3CD63C57, 0x9332938A,
213 0x82D882EE, 0x52FD5298, 0x7B377BD4, 0xBB71BB37, 0x5BF15B97, 0x47E14783,
214 0x2430243C, 0x510F51E2, 0xBAF8BAC6, 0x4A1B4AF3, 0xBF87BF48, 0x0DFA0D70,
215 0xB006B0B3, 0x753F75DE, 0xD25ED2FD, 0x7DBA7D20, 0x66AE6631, 0x3A5B3AA3,
216 0x598A591C, 0x00000000, 0xCDBCCD93, 0x1A9D1AE0, 0xAE6DAE2C, 0x7FC17FAB,
217 0x2BB12BC7, 0xBE0EBEB9, 0xE080E0A0, 0x8A5D8A10, 0x3BD23B52, 0x64D564BA,
218 0xD8A0D888, 0xE784E7A5, 0x5F075FE8, 0x1B141B11, 0x2CB52CC2, 0xFC90FCB4,
219 0x312C3127, 0x80A38065, 0x73B2732A, 0x0C730C81, 0x794C795F, 0x6B546B41,
220 0x4B924B02, 0x53745369, 0x9436948F, 0x8351831F, 0x2A382A36, 0xC4B0C49C,
221 0x22BD22C8, 0xD55AD5F8, 0xBDFCBDC3, 0x48604878, 0xFF62FFCE, 0x4C964C07,
222 0x416C4177, 0xC742C7E6, 0xEBF7EB24, 0x1C101C14, 0x5D7C5D63, 0x36283622,
223 0x672767C0, 0xE98CE9AF, 0x441344F9, 0x149514EA, 0xF59CF5BB, 0xCFC7CF18,
224 0x3F243F2D, 0xC046C0E3, 0x723B72DB, 0x5470546C, 0x29CA294C, 0xF0E3F035,
225 0x088508FE, 0xC6CBC617, 0xF311F34F, 0x8CD08CE4, 0xA493A459, 0xCAB8CA96,
226 0x68A6683B, 0xB883B84D, 0x38203828, 0xE5FFE52E, 0xAD9FAD56, 0x0B770B84,
227 0xC8C3C81D, 0x99CC99FF, 0x580358ED, 0x196F199A, 0x0E080E0A, 0x95BF957E,
228 0x70407050, 0xF7E7F730, 0x6E2B6ECF, 0x1FE21F6E, 0xB579B53D, 0x090C090F,
229 0x61AA6134, 0x57825716, 0x9F419F0B, 0x9D3A9D80, 0x11EA1164, 0x25B925CD,
230 0xAFE4AFDD, 0x459A4508, 0xDFA4DF8D, 0xA397A35C, 0xEA7EEAD5, 0x35DA3558,
231 0xED7AEDD0, 0x431743FC, 0xF866F8CB, 0xFB94FBB1, 0x37A137D3, 0xFA1DFA40,
232 0xC23DC268, 0xB4F0B4CC, 0x32DE325D, 0x9CB39C71, 0x560B56E7, 0xE372E3DA,
233 0x87A78760, 0x151C151B, 0xF9EFF93A, 0x63D163BF, 0x345334A9, 0x9A3E9A85,
234 0xB18FB142, 0x7C337CD1, 0x8826889B, 0x3D5F3DA6, 0xA1ECA1D7, 0xE476E4DF,
235 0x812A8194, 0x91499101, 0x0F810FFB, 0xEE88EEAA, 0x16EE1661, 0xD721D773,
236 0x97C497F5, 0xA51AA5A8, 0xFEEBFE3F, 0x6DD96DB5, 0x78C578AE, 0xC539C56D,
237 0x1D991DE5, 0x76CD76A4, 0x3EAD3EDC, 0xCB31CB67, 0xB68BB647, 0xEF01EF5B,
238 0x1218121E, 0x602360C5, 0x6ADD6AB0, 0x4D1F4DF6, 0xCE4ECEE9, 0xDE2DDE7C,
239 0x55F9559D, 0x7E487E5A, 0x214F21B2, 0x03F2037A, 0xA065A026, 0x5E8E5E19,
240 0x5A785A66, 0x655C654B, 0x6258624E, 0xFD19FD45, 0x068D06F4, 0x40E54086,
241 0xF298F2BE, 0x335733AC, 0x17671790, 0x057F058E, 0xE805E85E, 0x4F644F7D,
242 0x89AF896A, 0x10631095, 0x74B6742F, 0x0AFE0A75, 0x5CF55C92, 0x9BB79B74,
243 0x2D3C2D33, 0x30A530D6, 0x2ECE2E49, 0x49E94989, 0x46684672, 0x77447755,
244 0xA8E0A8D8, 0x964D9604, 0x284328BD, 0xA969A929, 0xD929D979, 0x862E8691,
245 0xD1ACD187, 0xF415F44A, 0x8D598D15, 0xD6A8D682, 0xB90AB9BC, 0x429E420D,
246 0xF66EF6C1, 0x2F472FB8, 0xDDDFDD06, 0x23342339, 0xCC35CC62, 0xF16AF1C4,
247 0xC1CFC112, 0x85DC85EB, 0x8F228F9E, 0x71C971A1, 0x90C090F0, 0xAA9BAA53,
248 0x018901F1, 0x8BD48BE1, 0x4EED4E8C, 0x8EAB8E6F, 0xAB12ABA2, 0x6FA26F3E,
249 0xE60DE654, 0xDB52DBF2, 0x92BB927B, 0xB702B7B6, 0x692F69CA, 0x39A939D9,
250 0xD3D7D30C, 0xA761A723, 0xA21EA2AD, 0xC3B4C399, 0x6C506C44, 0x07040705,
251 0x04F6047F, 0x27C22746, 0xAC16ACA7, 0xD025D076, 0x50865013, 0xDC56DCF7,
252 0x8455841A, 0xE109E151, 0x7ABE7A25, 0x139113EF},
253
254 {0xD939A9D9, 0x90176790, 0x719CB371, 0xD2A6E8D2, 0x05070405, 0x9852FD98,
255 0x6580A365, 0xDFE476DF, 0x08459A08, 0x024B9202, 0xA0E080A0, 0x665A7866,
256 0xDDAFE4DD, 0xB06ADDB0, 0xBF63D1BF, 0x362A3836, 0x54E60D54, 0x4320C643,
257 0x62CC3562, 0xBEF298BE, 0x1E12181E, 0x24EBF724, 0xD7A1ECD7, 0x77416C77,
258 0xBD2843BD, 0x32BC7532, 0xD47B37D4, 0x9B88269B, 0x700DFA70, 0xF94413F9,
259 0xB1FB94B1, 0x5A7E485A, 0x7A03F27A, 0xE48CD0E4, 0x47B68B47, 0x3C24303C,
260 0xA5E784A5, 0x416B5441, 0x06DDDF06, 0xC56023C5, 0x45FD1945, 0xA33A5BA3,
261 0x68C23D68, 0x158D5915, 0x21ECF321, 0x3166AE31, 0x3E6FA23E, 0x16578216,
262 0x95106395, 0x5BEF015B, 0x4DB8834D, 0x91862E91, 0xB56DD9B5, 0x1F83511F,
263 0x53AA9B53, 0x635D7C63, 0x3B68A63B, 0x3FFEEB3F, 0xD630A5D6, 0x257ABE25,
264 0xA7AC16A7, 0x0F090C0F, 0x35F0E335, 0x23A76123, 0xF090C0F0, 0xAFE98CAF,
265 0x809D3A80, 0x925CF592, 0x810C7381, 0x27312C27, 0x76D02576, 0xE7560BE7,
266 0x7B92BB7B, 0xE9CE4EE9, 0xF10189F1, 0x9F1E6B9F, 0xA93453A9, 0xC4F16AC4,
267 0x99C3B499, 0x975BF197, 0x8347E183, 0x6B18E66B, 0xC822BDC8, 0x0E98450E,
268 0x6E1FE26E, 0xC9B3F4C9, 0x2F74B62F, 0xCBF866CB, 0xFF99CCFF, 0xEA1495EA,
269 0xED5803ED, 0xF7DC56F7, 0xE18BD4E1, 0x1B151C1B, 0xADA21EAD, 0x0CD3D70C,
270 0x2BE2FB2B, 0x1DC8C31D, 0x195E8E19, 0xC22CB5C2, 0x8949E989, 0x12C1CF12,
271 0x7E95BF7E, 0x207DBA20, 0x6411EA64, 0x840B7784, 0x6DC5396D, 0x6A89AF6A,
272 0xD17C33D1, 0xA171C9A1, 0xCEFF62CE, 0x37BB7137, 0xFB0F81FB, 0x3DB5793D,
273 0x51E10951, 0xDC3EADDC, 0x2D3F242D, 0xA476CDA4, 0x9D55F99D, 0xEE82D8EE,
274 0x8640E586, 0xAE78C5AE, 0xCD25B9CD, 0x04964D04, 0x55774455, 0x0A0E080A,
275 0x13508613, 0x30F7E730, 0xD337A1D3, 0x40FA1D40, 0x3461AA34, 0x8C4EED8C,
276 0xB3B006B3, 0x6C54706C, 0x2A73B22A, 0x523BD252, 0x0B9F410B, 0x8B027B8B,
277 0x88D8A088, 0x4FF3114F, 0x67CB3167, 0x4627C246, 0xC06727C0, 0xB4FC90B4,
278 0x28382028, 0x7F04F67F, 0x78486078, 0x2EE5FF2E, 0x074C9607, 0x4B655C4B,
279 0xC72BB1C7, 0x6F8EAB6F, 0x0D429E0D, 0xBBF59CBB, 0xF2DB52F2, 0xF34A1BF3,
280 0xA63D5FA6, 0x59A49359, 0xBCB90ABC, 0x3AF9EF3A, 0xEF1391EF, 0xFE0885FE,
281 0x01914901, 0x6116EE61, 0x7CDE2D7C, 0xB2214FB2, 0x42B18F42, 0xDB723BDB,
282 0xB82F47B8, 0x48BF8748, 0x2CAE6D2C, 0xE3C046E3, 0x573CD657, 0x859A3E85,
283 0x29A96929, 0x7D4F647D, 0x94812A94, 0x492ECE49, 0x17C6CB17, 0xCA692FCA,
284 0xC3BDFCC3, 0x5CA3975C, 0x5EE8055E, 0xD0ED7AD0, 0x87D1AC87, 0x8E057F8E,
285 0xBA64D5BA, 0xA8A51AA8, 0xB7264BB7, 0xB9BE0EB9, 0x6087A760, 0xF8D55AF8,
286 0x22362822, 0x111B1411, 0xDE753FDE, 0x79D92979, 0xAAEE88AA, 0x332D3C33,
287 0x5F794C5F, 0xB6B702B6, 0x96CAB896, 0x5835DA58, 0x9CC4B09C, 0xFC4317FC,
288 0x1A84551A, 0xF64D1FF6, 0x1C598A1C, 0x38B27D38, 0xAC3357AC, 0x18CFC718,
289 0xF4068DF4, 0x69537469, 0x749BB774, 0xF597C4F5, 0x56AD9F56, 0xDAE372DA,
290 0xD5EA7ED5, 0x4AF4154A, 0x9E8F229E, 0xA2AB12A2, 0x4E62584E, 0xE85F07E8,
291 0xE51D99E5, 0x39233439, 0xC1F66EC1, 0x446C5044, 0x5D32DE5D, 0x72466872,
292 0x26A06526, 0x93CDBC93, 0x03DADB03, 0xC6BAF8C6, 0xFA9EC8FA, 0x82D6A882,
293 0xCF6E2BCF, 0x50704050, 0xEB85DCEB, 0x750AFE75, 0x8A93328A, 0x8DDFA48D,
294 0x4C29CA4C, 0x141C1014, 0x73D72173, 0xCCB4F0CC, 0x09D4D309, 0x108A5D10,
295 0xE2510FE2, 0x00000000, 0x9A196F9A, 0xE01A9DE0, 0x8F94368F, 0xE6C742E6,
296 0xECC94AEC, 0xFDD25EFD, 0xAB7FC1AB, 0xD8A8E0D8}
297};
298
299/* The exp_to_poly and poly_to_exp tables are used to perform efficient
300 * operations in GF(2^8) represented as GF(2)[x]/w(x) where
301 * w(x)=x^8+x^6+x^3+x^2+1. We care about doing that because it's part of the
302 * definition of the RS matrix in the key schedule. Elements of that field
303 * are polynomials of degree not greater than 7 and all coefficients 0 or 1,
304 * which can be represented naturally by bytes (just substitute x=2). In that
305 * form, GF(2^8) addition is the same as bitwise XOR, but GF(2^8)
306 * multiplication is inefficient without hardware support. To multiply
307 * faster, I make use of the fact x is a generator for the nonzero elements,
308 * so that every element p of GF(2)[x]/w(x) is either 0 or equal to (x)^n for
309 * some n in 0..254. Note that that caret is exponentiation in GF(2^8),
310 * *not* polynomial notation. So if I want to compute pq where p and q are
311 * in GF(2^8), I can just say:
312 * 1. if p=0 or q=0 then pq=0
313 * 2. otherwise, find m and n such that p=x^m and q=x^n
314 * 3. pq=(x^m)(x^n)=x^(m+n), so add m and n and find pq
315 * The translations in steps 2 and 3 are looked up in the tables
316 * poly_to_exp (for step 2) and exp_to_poly (for step 3). To see this
317 * in action, look at the CALC_S macro. As additional wrinkles, note that
318 * one of my operands is always a constant, so the poly_to_exp lookup on it
319 * is done in advance; I included the original values in the comments so
320 * readers can have some chance of recognizing that this *is* the RS matrix
321 * from the Twofish paper. I've only included the table entries I actually
322 * need; I never do a lookup on a variable input of zero and the biggest
323 * exponents I'll ever see are 254 (variable) and 237 (constant), so they'll
324 * never sum to more than 491. I'm repeating part of the exp_to_poly table
325 * so that I don't have to do mod-255 reduction in the exponent arithmetic.
326 * Since I know my constant operands are never zero, I only have to worry
327 * about zero values in the variable operand, and I do it with a simple
328 * conditional branch. I know conditionals are expensive, but I couldn't
329 * see a non-horrible way of avoiding them, and I did manage to group the
330 * statements so that each if covers four group multiplications. */
331
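In code, the recipe described in the comment above amounts to a log/antilog multiply over the two tables that follow. A sketch of it; gf256_mul is a made-up name, and the real key-schedule code inlines this with the constant operand's exponent looked up in advance and the mod-255 reduction avoided via the table's repeated tail, exactly as the comment says:

/* GF(2^8) multiply via the poly_to_exp / exp_to_poly tables below.
 * Indexing assumes poly_to_exp[v - 1] holds the exponent of the nonzero
 * element v (poly_to_exp[0] == 0 for v == 1, i.e. x^0), matching how
 * the tables are used elsewhere in this file. */
static u8 gf256_mul(u8 p, u8 q)
{
	if (p == 0 || q == 0)
		return 0;

	return exp_to_poly[(poly_to_exp[p - 1] + poly_to_exp[q - 1]) % 255];
}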
332static const u8 poly_to_exp[255] = {
333 0x00, 0x01, 0x17, 0x02, 0x2E, 0x18, 0x53, 0x03, 0x6A, 0x2F, 0x93, 0x19,
334 0x34, 0x54, 0x45, 0x04, 0x5C, 0x6B, 0xB6, 0x30, 0xA6, 0x94, 0x4B, 0x1A,
335 0x8C, 0x35, 0x81, 0x55, 0xAA, 0x46, 0x0D, 0x05, 0x24, 0x5D, 0x87, 0x6C,
336 0x9B, 0xB7, 0xC1, 0x31, 0x2B, 0xA7, 0xA3, 0x95, 0x98, 0x4C, 0xCA, 0x1B,
337 0xE6, 0x8D, 0x73, 0x36, 0xCD, 0x82, 0x12, 0x56, 0x62, 0xAB, 0xF0, 0x47,
338 0x4F, 0x0E, 0xBD, 0x06, 0xD4, 0x25, 0xD2, 0x5E, 0x27, 0x88, 0x66, 0x6D,
339 0xD6, 0x9C, 0x79, 0xB8, 0x08, 0xC2, 0xDF, 0x32, 0x68, 0x2C, 0xFD, 0xA8,
340 0x8A, 0xA4, 0x5A, 0x96, 0x29, 0x99, 0x22, 0x4D, 0x60, 0xCB, 0xE4, 0x1C,
341 0x7B, 0xE7, 0x3B, 0x8E, 0x9E, 0x74, 0xF4, 0x37, 0xD8, 0xCE, 0xF9, 0x83,
342 0x6F, 0x13, 0xB2, 0x57, 0xE1, 0x63, 0xDC, 0xAC, 0xC4, 0xF1, 0xAF, 0x48,
343 0x0A, 0x50, 0x42, 0x0F, 0xBA, 0xBE, 0xC7, 0x07, 0xDE, 0xD5, 0x78, 0x26,
344 0x65, 0xD3, 0xD1, 0x5F, 0xE3, 0x28, 0x21, 0x89, 0x59, 0x67, 0xFC, 0x6E,
345 0xB1, 0xD7, 0xF8, 0x9D, 0xF3, 0x7A, 0x3A, 0xB9, 0xC6, 0x09, 0x41, 0xC3,
346 0xAE, 0xE0, 0xDB, 0x33, 0x44, 0x69, 0x92, 0x2D, 0x52, 0xFE, 0x16, 0xA9,
347 0x0C, 0x8B, 0x80, 0xA5, 0x4A, 0x5B, 0xB5, 0x97, 0xC9, 0x2A, 0xA2, 0x9A,
348 0xC0, 0x23, 0x86, 0x4E, 0xBC, 0x61, 0xEF, 0xCC, 0x11, 0xE5, 0x72, 0x1D,
349 0x3D, 0x7C, 0xEB, 0xE8, 0xE9, 0x3C, 0xEA, 0x8F, 0x7D, 0x9F, 0xEC, 0x75,
350 0x1E, 0xF5, 0x3E, 0x38, 0xF6, 0xD9, 0x3F, 0xCF, 0x76, 0xFA, 0x1F, 0x84,
351 0xA0, 0x70, 0xED, 0x14, 0x90, 0xB3, 0x7E, 0x58, 0xFB, 0xE2, 0x20, 0x64,
352 0xD0, 0xDD, 0x77, 0xAD, 0xDA, 0xC5, 0x40, 0xF2, 0x39, 0xB0, 0xF7, 0x49,
353 0xB4, 0x0B, 0x7F, 0x51, 0x15, 0x43, 0x91, 0x10, 0x71, 0xBB, 0xEE, 0xBF,
354 0x85, 0xC8, 0xA1
355};
356
357static const u8 exp_to_poly[492] = {
358 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x4D, 0x9A, 0x79, 0xF2,
359 0xA9, 0x1F, 0x3E, 0x7C, 0xF8, 0xBD, 0x37, 0x6E, 0xDC, 0xF5, 0xA7, 0x03,
360 0x06, 0x0C, 0x18, 0x30, 0x60, 0xC0, 0xCD, 0xD7, 0xE3, 0x8B, 0x5B, 0xB6,
361 0x21, 0x42, 0x84, 0x45, 0x8A, 0x59, 0xB2, 0x29, 0x52, 0xA4, 0x05, 0x0A,
362 0x14, 0x28, 0x50, 0xA0, 0x0D, 0x1A, 0x34, 0x68, 0xD0, 0xED, 0x97, 0x63,
363 0xC6, 0xC1, 0xCF, 0xD3, 0xEB, 0x9B, 0x7B, 0xF6, 0xA1, 0x0F, 0x1E, 0x3C,
364 0x78, 0xF0, 0xAD, 0x17, 0x2E, 0x5C, 0xB8, 0x3D, 0x7A, 0xF4, 0xA5, 0x07,
365 0x0E, 0x1C, 0x38, 0x70, 0xE0, 0x8D, 0x57, 0xAE, 0x11, 0x22, 0x44, 0x88,
366 0x5D, 0xBA, 0x39, 0x72, 0xE4, 0x85, 0x47, 0x8E, 0x51, 0xA2, 0x09, 0x12,
367 0x24, 0x48, 0x90, 0x6D, 0xDA, 0xF9, 0xBF, 0x33, 0x66, 0xCC, 0xD5, 0xE7,
368 0x83, 0x4B, 0x96, 0x61, 0xC2, 0xC9, 0xDF, 0xF3, 0xAB, 0x1B, 0x36, 0x6C,
369 0xD8, 0xFD, 0xB7, 0x23, 0x46, 0x8C, 0x55, 0xAA, 0x19, 0x32, 0x64, 0xC8,
370 0xDD, 0xF7, 0xA3, 0x0B, 0x16, 0x2C, 0x58, 0xB0, 0x2D, 0x5A, 0xB4, 0x25,
371 0x4A, 0x94, 0x65, 0xCA, 0xD9, 0xFF, 0xB3, 0x2B, 0x56, 0xAC, 0x15, 0x2A,
372 0x54, 0xA8, 0x1D, 0x3A, 0x74, 0xE8, 0x9D, 0x77, 0xEE, 0x91, 0x6F, 0xDE,
373 0xF1, 0xAF, 0x13, 0x26, 0x4C, 0x98, 0x7D, 0xFA, 0xB9, 0x3F, 0x7E, 0xFC,
374 0xB5, 0x27, 0x4E, 0x9C, 0x75, 0xEA, 0x99, 0x7F, 0xFE, 0xB1, 0x2F, 0x5E,
375 0xBC, 0x35, 0x6A, 0xD4, 0xE5, 0x87, 0x43, 0x86, 0x41, 0x82, 0x49, 0x92,
376 0x69, 0xD2, 0xE9, 0x9F, 0x73, 0xE6, 0x81, 0x4F, 0x9E, 0x71, 0xE2, 0x89,
377 0x5F, 0xBE, 0x31, 0x62, 0xC4, 0xC5, 0xC7, 0xC3, 0xCB, 0xDB, 0xFB, 0xBB,
378 0x3B, 0x76, 0xEC, 0x95, 0x67, 0xCE, 0xD1, 0xEF, 0x93, 0x6B, 0xD6, 0xE1,
379 0x8F, 0x53, 0xA6, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x4D,
380 0x9A, 0x79, 0xF2, 0xA9, 0x1F, 0x3E, 0x7C, 0xF8, 0xBD, 0x37, 0x6E, 0xDC,
381 0xF5, 0xA7, 0x03, 0x06, 0x0C, 0x18, 0x30, 0x60, 0xC0, 0xCD, 0xD7, 0xE3,
382 0x8B, 0x5B, 0xB6, 0x21, 0x42, 0x84, 0x45, 0x8A, 0x59, 0xB2, 0x29, 0x52,
383 0xA4, 0x05, 0x0A, 0x14, 0x28, 0x50, 0xA0, 0x0D, 0x1A, 0x34, 0x68, 0xD0,
384 0xED, 0x97, 0x63, 0xC6, 0xC1, 0xCF, 0xD3, 0xEB, 0x9B, 0x7B, 0xF6, 0xA1,
385 0x0F, 0x1E, 0x3C, 0x78, 0xF0, 0xAD, 0x17, 0x2E, 0x5C, 0xB8, 0x3D, 0x7A,
386 0xF4, 0xA5, 0x07, 0x0E, 0x1C, 0x38, 0x70, 0xE0, 0x8D, 0x57, 0xAE, 0x11,
387 0x22, 0x44, 0x88, 0x5D, 0xBA, 0x39, 0x72, 0xE4, 0x85, 0x47, 0x8E, 0x51,
388 0xA2, 0x09, 0x12, 0x24, 0x48, 0x90, 0x6D, 0xDA, 0xF9, 0xBF, 0x33, 0x66,
389 0xCC, 0xD5, 0xE7, 0x83, 0x4B, 0x96, 0x61, 0xC2, 0xC9, 0xDF, 0xF3, 0xAB,
390 0x1B, 0x36, 0x6C, 0xD8, 0xFD, 0xB7, 0x23, 0x46, 0x8C, 0x55, 0xAA, 0x19,
391 0x32, 0x64, 0xC8, 0xDD, 0xF7, 0xA3, 0x0B, 0x16, 0x2C, 0x58, 0xB0, 0x2D,
392 0x5A, 0xB4, 0x25, 0x4A, 0x94, 0x65, 0xCA, 0xD9, 0xFF, 0xB3, 0x2B, 0x56,
393 0xAC, 0x15, 0x2A, 0x54, 0xA8, 0x1D, 0x3A, 0x74, 0xE8, 0x9D, 0x77, 0xEE,
394 0x91, 0x6F, 0xDE, 0xF1, 0xAF, 0x13, 0x26, 0x4C, 0x98, 0x7D, 0xFA, 0xB9,
395 0x3F, 0x7E, 0xFC, 0xB5, 0x27, 0x4E, 0x9C, 0x75, 0xEA, 0x99, 0x7F, 0xFE,
396 0xB1, 0x2F, 0x5E, 0xBC, 0x35, 0x6A, 0xD4, 0xE5, 0x87, 0x43, 0x86, 0x41,
397 0x82, 0x49, 0x92, 0x69, 0xD2, 0xE9, 0x9F, 0x73, 0xE6, 0x81, 0x4F, 0x9E,
398 0x71, 0xE2, 0x89, 0x5F, 0xBE, 0x31, 0x62, 0xC4, 0xC5, 0xC7, 0xC3, 0xCB
399};
400
401
402/* The table constants are indices of
403 * S-box entries, preprocessed through q0 and q1. */
404static const u8 calc_sb_tbl[512] = {
405 0xA9, 0x75, 0x67, 0xF3, 0xB3, 0xC6, 0xE8, 0xF4,
406 0x04, 0xDB, 0xFD, 0x7B, 0xA3, 0xFB, 0x76, 0xC8,
407 0x9A, 0x4A, 0x92, 0xD3, 0x80, 0xE6, 0x78, 0x6B,
408 0xE4, 0x45, 0xDD, 0x7D, 0xD1, 0xE8, 0x38, 0x4B,
409 0x0D, 0xD6, 0xC6, 0x32, 0x35, 0xD8, 0x98, 0xFD,
410 0x18, 0x37, 0xF7, 0x71, 0xEC, 0xF1, 0x6C, 0xE1,
411 0x43, 0x30, 0x75, 0x0F, 0x37, 0xF8, 0x26, 0x1B,
412 0xFA, 0x87, 0x13, 0xFA, 0x94, 0x06, 0x48, 0x3F,
413 0xF2, 0x5E, 0xD0, 0xBA, 0x8B, 0xAE, 0x30, 0x5B,
414 0x84, 0x8A, 0x54, 0x00, 0xDF, 0xBC, 0x23, 0x9D,
415 0x19, 0x6D, 0x5B, 0xC1, 0x3D, 0xB1, 0x59, 0x0E,
416 0xF3, 0x80, 0xAE, 0x5D, 0xA2, 0xD2, 0x82, 0xD5,
417 0x63, 0xA0, 0x01, 0x84, 0x83, 0x07, 0x2E, 0x14,
418 0xD9, 0xB5, 0x51, 0x90, 0x9B, 0x2C, 0x7C, 0xA3,
419 0xA6, 0xB2, 0xEB, 0x73, 0xA5, 0x4C, 0xBE, 0x54,
420 0x16, 0x92, 0x0C, 0x74, 0xE3, 0x36, 0x61, 0x51,
421 0xC0, 0x38, 0x8C, 0xB0, 0x3A, 0xBD, 0xF5, 0x5A,
422 0x73, 0xFC, 0x2C, 0x60, 0x25, 0x62, 0x0B, 0x96,
423 0xBB, 0x6C, 0x4E, 0x42, 0x89, 0xF7, 0x6B, 0x10,
424 0x53, 0x7C, 0x6A, 0x28, 0xB4, 0x27, 0xF1, 0x8C,
425 0xE1, 0x13, 0xE6, 0x95, 0xBD, 0x9C, 0x45, 0xC7,
426 0xE2, 0x24, 0xF4, 0x46, 0xB6, 0x3B, 0x66, 0x70,
427 0xCC, 0xCA, 0x95, 0xE3, 0x03, 0x85, 0x56, 0xCB,
428 0xD4, 0x11, 0x1C, 0xD0, 0x1E, 0x93, 0xD7, 0xB8,
429 0xFB, 0xA6, 0xC3, 0x83, 0x8E, 0x20, 0xB5, 0xFF,
430 0xE9, 0x9F, 0xCF, 0x77, 0xBF, 0xC3, 0xBA, 0xCC,
431 0xEA, 0x03, 0x77, 0x6F, 0x39, 0x08, 0xAF, 0xBF,
432 0x33, 0x40, 0xC9, 0xE7, 0x62, 0x2B, 0x71, 0xE2,
433 0x81, 0x79, 0x79, 0x0C, 0x09, 0xAA, 0xAD, 0x82,
434 0x24, 0x41, 0xCD, 0x3A, 0xF9, 0xEA, 0xD8, 0xB9,
435 0xE5, 0xE4, 0xC5, 0x9A, 0xB9, 0xA4, 0x4D, 0x97,
436 0x44, 0x7E, 0x08, 0xDA, 0x86, 0x7A, 0xE7, 0x17,
437 0xA1, 0x66, 0x1D, 0x94, 0xAA, 0xA1, 0xED, 0x1D,
438 0x06, 0x3D, 0x70, 0xF0, 0xB2, 0xDE, 0xD2, 0xB3,
439 0x41, 0x0B, 0x7B, 0x72, 0xA0, 0xA7, 0x11, 0x1C,
440 0x31, 0xEF, 0xC2, 0xD1, 0x27, 0x53, 0x90, 0x3E,
441 0x20, 0x8F, 0xF6, 0x33, 0x60, 0x26, 0xFF, 0x5F,
442 0x96, 0xEC, 0x5C, 0x76, 0xB1, 0x2A, 0xAB, 0x49,
443 0x9E, 0x81, 0x9C, 0x88, 0x52, 0xEE, 0x1B, 0x21,
444 0x5F, 0xC4, 0x93, 0x1A, 0x0A, 0xEB, 0xEF, 0xD9,
445 0x91, 0xC5, 0x85, 0x39, 0x49, 0x99, 0xEE, 0xCD,
446 0x2D, 0xAD, 0x4F, 0x31, 0x8F, 0x8B, 0x3B, 0x01,
447 0x47, 0x18, 0x87, 0x23, 0x6D, 0xDD, 0x46, 0x1F,
448 0xD6, 0x4E, 0x3E, 0x2D, 0x69, 0xF9, 0x64, 0x48,
449 0x2A, 0x4F, 0xCE, 0xF2, 0xCB, 0x65, 0x2F, 0x8E,
450 0xFC, 0x78, 0x97, 0x5C, 0x05, 0x58, 0x7A, 0x19,
451 0xAC, 0x8D, 0x7F, 0xE5, 0xD5, 0x98, 0x1A, 0x57,
452 0x4B, 0x67, 0x0E, 0x7F, 0xA7, 0x05, 0x5A, 0x64,
453 0x28, 0xAF, 0x14, 0x63, 0x3F, 0xB6, 0x29, 0xFE,
454 0x88, 0xF5, 0x3C, 0xB7, 0x4C, 0x3C, 0x02, 0xA5,
455 0xB8, 0xCE, 0xDA, 0xE9, 0xB0, 0x68, 0x17, 0x44,
456 0x55, 0xE0, 0x1F, 0x4D, 0x8A, 0x43, 0x7D, 0x69,
457 0x57, 0x29, 0xC7, 0x2E, 0x8D, 0xAC, 0x74, 0x15,
458 0xB7, 0x59, 0xC4, 0xA8, 0x9F, 0x0A, 0x72, 0x9E,
459 0x7E, 0x6E, 0x15, 0x47, 0x22, 0xDF, 0x12, 0x34,
460 0x58, 0x35, 0x07, 0x6A, 0x99, 0xCF, 0x34, 0xDC,
461 0x6E, 0x22, 0x50, 0xC9, 0xDE, 0xC0, 0x68, 0x9B,
462 0x65, 0x89, 0xBC, 0xD4, 0xDB, 0xED, 0xF8, 0xAB,
463 0xC8, 0x12, 0xA8, 0xA2, 0x2B, 0x0D, 0x40, 0x52,
464 0xDC, 0xBB, 0xFE, 0x02, 0x32, 0x2F, 0xA4, 0xA9,
465 0xCA, 0xD7, 0x10, 0x61, 0x21, 0x1E, 0xF0, 0xB4,
466 0xD3, 0x50, 0x5D, 0x04, 0x0F, 0xF6, 0x00, 0xC2,
467 0x6F, 0x16, 0x9D, 0x25, 0x36, 0x86, 0x42, 0x56,
468 0x4A, 0x55, 0x5E, 0x09, 0xC1, 0xBE, 0xE0, 0x91
469};
470
471/* Macro to perform one column of the RS matrix multiplication. The
472 * parameters a, b, c, and d are the four bytes of output; i is the index
473 * of the key bytes, and w, x, y, and z, are the column of constants from
474 * the RS matrix, preprocessed through the poly_to_exp table. */
475
476#define CALC_S(a, b, c, d, i, w, x, y, z) \
477 if (key[i]) { \
478 tmp = poly_to_exp[key[i] - 1]; \
479 (a) ^= exp_to_poly[tmp + (w)]; \
480 (b) ^= exp_to_poly[tmp + (x)]; \
481 (c) ^= exp_to_poly[tmp + (y)]; \
482 (d) ^= exp_to_poly[tmp + (z)]; \
483 }
484
485/* Macros to calculate the key-dependent S-boxes for a 128-bit key using
486 * the S vector from CALC_S. CALC_SB_2 computes a single entry in all
487 * four S-boxes, where i is the index of the entry to compute, and a and b
488 * are the index numbers preprocessed through the q0 and q1 tables
489 * respectively. */
490
491#define CALC_SB_2(i, a, b) \
492 ctx->s[0][i] = mds[0][q0[(a) ^ sa] ^ se]; \
493 ctx->s[1][i] = mds[1][q0[(b) ^ sb] ^ sf]; \
494 ctx->s[2][i] = mds[2][q1[(a) ^ sc] ^ sg]; \
495 ctx->s[3][i] = mds[3][q1[(b) ^ sd] ^ sh]
496
497/* Macro exactly like CALC_SB_2, but for 192-bit keys. */
498
499#define CALC_SB192_2(i, a, b) \
500 ctx->s[0][i] = mds[0][q0[q0[(b) ^ sa] ^ se] ^ si]; \
501 ctx->s[1][i] = mds[1][q0[q1[(b) ^ sb] ^ sf] ^ sj]; \
502 ctx->s[2][i] = mds[2][q1[q0[(a) ^ sc] ^ sg] ^ sk]; \
503 ctx->s[3][i] = mds[3][q1[q1[(a) ^ sd] ^ sh] ^ sl];
504
505/* Macro exactly like CALC_SB_2, but for 256-bit keys. */
506
507#define CALC_SB256_2(i, a, b) \
508 ctx->s[0][i] = mds[0][q0[q0[q1[(b) ^ sa] ^ se] ^ si] ^ sm]; \
509 ctx->s[1][i] = mds[1][q0[q1[q1[(a) ^ sb] ^ sf] ^ sj] ^ sn]; \
510 ctx->s[2][i] = mds[2][q1[q0[q0[(a) ^ sc] ^ sg] ^ sk] ^ so]; \
511 ctx->s[3][i] = mds[3][q1[q1[q0[(b) ^ sd] ^ sh] ^ sl] ^ sp];
512
513/* Macros to calculate the whitening and round subkeys. CALC_K_2 computes the
514 * last two stages of the h() function for a given index (either 2i or 2i+1).
515 * a, b, c, and d are the four bytes going into the last two stages. For
516 * 128-bit keys, this is the entire h() function and a and c are the index
517 * preprocessed through q0 and q1 respectively; for longer keys they are the
518 * output of previous stages. j is the index of the first key byte to use.
519 * CALC_K computes a pair of subkeys for 128-bit Twofish, by calling CALC_K_2
520 * twice, doing the Pseudo-Hadamard Transform, and doing the necessary
521 * rotations. Its parameters are: a, the array to write the results into,
522 * j, the index of the first output entry, k and l, the preprocessed indices
523 * for index 2i, and m and n, the preprocessed indices for index 2i+1.
524 * CALC_K192_2 expands CALC_K_2 to handle 192-bit keys, by doing an
525 * additional lookup-and-XOR stage. The parameters a, b, c and d are the
526 * four bytes going into the last three stages. For 192-bit keys, c = d
527 * are the index preprocessed through q0, and a = b are the index
528 * preprocessed through q1; j is the index of the first key byte to use.
529 * CALC_K192 is identical to CALC_K but for using the CALC_K192_2 macro
530 * instead of CALC_K_2.
531 * CALC_K256_2 expands CALC_K192_2 to handle 256-bit keys, by doing an
532 * additional lookup-and-XOR stage. The parameters a and b are the index
533 * preprocessed through q0 and q1 respectively; j is the index of the first
534 * key byte to use. CALC_K256 is identical to CALC_K but for using the
535 * CALC_K256_2 macro instead of CALC_K_2. */
536
537#define CALC_K_2(a, b, c, d, j) \
538 mds[0][q0[a ^ key[(j) + 8]] ^ key[j]] \
539 ^ mds[1][q0[b ^ key[(j) + 9]] ^ key[(j) + 1]] \
540 ^ mds[2][q1[c ^ key[(j) + 10]] ^ key[(j) + 2]] \
541 ^ mds[3][q1[d ^ key[(j) + 11]] ^ key[(j) + 3]]
542
543#define CALC_K(a, j, k, l, m, n) \
544 x = CALC_K_2 (k, l, k, l, 0); \
545 y = CALC_K_2 (m, n, m, n, 4); \
546 y = rol32(y, 8); \
547 x += y; y += x; ctx->a[j] = x; \
548 ctx->a[(j) + 1] = rol32(y, 9)
549
550#define CALC_K192_2(a, b, c, d, j) \
551 CALC_K_2 (q0[a ^ key[(j) + 16]], \
552 q1[b ^ key[(j) + 17]], \
553 q0[c ^ key[(j) + 18]], \
554 q1[d ^ key[(j) + 19]], j)
555
556#define CALC_K192(a, j, k, l, m, n) \
557 x = CALC_K192_2 (l, l, k, k, 0); \
558 y = CALC_K192_2 (n, n, m, m, 4); \
559 y = rol32(y, 8); \
560 x += y; y += x; ctx->a[j] = x; \
561 ctx->a[(j) + 1] = rol32(y, 9)
562
563#define CALC_K256_2(a, b, j) \
564 CALC_K192_2 (q1[b ^ key[(j) + 24]], \
565 q1[a ^ key[(j) + 25]], \
566 q0[a ^ key[(j) + 26]], \
567 q0[b ^ key[(j) + 27]], j)
568
569#define CALC_K256(a, j, k, l, m, n) \
570 x = CALC_K256_2 (k, l, 0); \
571 y = CALC_K256_2 (m, n, 4); \
572 y = rol32(y, 8); \
573 x += y; y += x; ctx->a[j] = x; \
574 ctx->a[(j) + 1] = rol32(y, 9)
575
576
577/* Macros to compute the g() function in the encryption and decryption 50/* Macros to compute the g() function in the encryption and decryption
578 * rounds. G1 is the straight g() function; G2 includes the 8-bit 51 * rounds. G1 is the straight g() function; G2 includes the 8-bit
579 * rotation for the high 32-bit word. */ 52 * rotation for the high 32-bit word. */
@@ -630,176 +103,7 @@ static const u8 calc_sb_tbl[512] = {
630 x ^= ctx->w[m]; \ 103 x ^= ctx->w[m]; \
631 dst[n] = cpu_to_le32(x) 104 dst[n] = cpu_to_le32(x)
632 105
633#define TF_MIN_KEY_SIZE 16
634#define TF_MAX_KEY_SIZE 32
635#define TF_BLOCK_SIZE 16
636
637/* Structure for an expanded Twofish key. s contains the key-dependent
638 * S-boxes composed with the MDS matrix; w contains the eight "whitening"
639 * subkeys, K[0] through K[7]. k holds the remaining, "round" subkeys. Note
640 * that k[i] corresponds to what the Twofish paper calls K[i+8]. */
641struct twofish_ctx {
642 u32 s[4][256], w[8], k[32];
643};
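(With 4-byte u32 entries, that expanded key works out to 4*256*4 + 8*4 + 32*4 = 4256 bytes per transform, almost all of it the key-dependent S-boxes.)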
644
645/* Perform the key setup. */
646static int twofish_setkey(struct crypto_tfm *tfm, const u8 *key,
647 unsigned int key_len, u32 *flags)
648{
649
650 struct twofish_ctx *ctx = crypto_tfm_ctx(tfm);
651 106
652 int i, j, k;
653
654 /* Temporaries for CALC_K. */
655 u32 x, y;
656
657 /* The S vector used to key the S-boxes, split up into individual bytes.
658 * 128-bit keys use only sa through sh; 256-bit use all of them. */
659 u8 sa = 0, sb = 0, sc = 0, sd = 0, se = 0, sf = 0, sg = 0, sh = 0;
660 u8 si = 0, sj = 0, sk = 0, sl = 0, sm = 0, sn = 0, so = 0, sp = 0;
661
662 /* Temporary for CALC_S. */
663 u8 tmp;
664
665 /* Check key length. */
666 if (key_len != 16 && key_len != 24 && key_len != 32)
667 {
668 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
669 return -EINVAL; /* unsupported key length */
670 }
671
672 /* Compute the first two words of the S vector. The magic numbers are
673 * the entries of the RS matrix, preprocessed through poly_to_exp. The
674 * numbers in the comments are the original (polynomial form) matrix
675 * entries. */
676 CALC_S (sa, sb, sc, sd, 0, 0x00, 0x2D, 0x01, 0x2D); /* 01 A4 02 A4 */
677 CALC_S (sa, sb, sc, sd, 1, 0x2D, 0xA4, 0x44, 0x8A); /* A4 56 A1 55 */
678 CALC_S (sa, sb, sc, sd, 2, 0x8A, 0xD5, 0xBF, 0xD1); /* 55 82 FC 87 */
679 CALC_S (sa, sb, sc, sd, 3, 0xD1, 0x7F, 0x3D, 0x99); /* 87 F3 C1 5A */
680 CALC_S (sa, sb, sc, sd, 4, 0x99, 0x46, 0x66, 0x96); /* 5A 1E 47 58 */
681 CALC_S (sa, sb, sc, sd, 5, 0x96, 0x3C, 0x5B, 0xED); /* 58 C6 AE DB */
682 CALC_S (sa, sb, sc, sd, 6, 0xED, 0x37, 0x4F, 0xE0); /* DB 68 3D 9E */
683 CALC_S (sa, sb, sc, sd, 7, 0xE0, 0xD0, 0x8C, 0x17); /* 9E E5 19 03 */
684 CALC_S (se, sf, sg, sh, 8, 0x00, 0x2D, 0x01, 0x2D); /* 01 A4 02 A4 */
685 CALC_S (se, sf, sg, sh, 9, 0x2D, 0xA4, 0x44, 0x8A); /* A4 56 A1 55 */
686 CALC_S (se, sf, sg, sh, 10, 0x8A, 0xD5, 0xBF, 0xD1); /* 55 82 FC 87 */
687 CALC_S (se, sf, sg, sh, 11, 0xD1, 0x7F, 0x3D, 0x99); /* 87 F3 C1 5A */
688 CALC_S (se, sf, sg, sh, 12, 0x99, 0x46, 0x66, 0x96); /* 5A 1E 47 58 */
689 CALC_S (se, sf, sg, sh, 13, 0x96, 0x3C, 0x5B, 0xED); /* 58 C6 AE DB */
690 CALC_S (se, sf, sg, sh, 14, 0xED, 0x37, 0x4F, 0xE0); /* DB 68 3D 9E */
691 CALC_S (se, sf, sg, sh, 15, 0xE0, 0xD0, 0x8C, 0x17); /* 9E E5 19 03 */
692
693 if (key_len == 24 || key_len == 32) { /* 192- or 256-bit key */
694 /* Calculate the third word of the S vector */
695 CALC_S (si, sj, sk, sl, 16, 0x00, 0x2D, 0x01, 0x2D); /* 01 A4 02 A4 */
696 CALC_S (si, sj, sk, sl, 17, 0x2D, 0xA4, 0x44, 0x8A); /* A4 56 A1 55 */
697 CALC_S (si, sj, sk, sl, 18, 0x8A, 0xD5, 0xBF, 0xD1); /* 55 82 FC 87 */
698 CALC_S (si, sj, sk, sl, 19, 0xD1, 0x7F, 0x3D, 0x99); /* 87 F3 C1 5A */
699 CALC_S (si, sj, sk, sl, 20, 0x99, 0x46, 0x66, 0x96); /* 5A 1E 47 58 */
700 CALC_S (si, sj, sk, sl, 21, 0x96, 0x3C, 0x5B, 0xED); /* 58 C6 AE DB */
701 CALC_S (si, sj, sk, sl, 22, 0xED, 0x37, 0x4F, 0xE0); /* DB 68 3D 9E */
702 CALC_S (si, sj, sk, sl, 23, 0xE0, 0xD0, 0x8C, 0x17); /* 9E E5 19 03 */
703 }
704
705 if (key_len == 32) { /* 256-bit key */
706 /* Calculate the fourth word of the S vector */
707 CALC_S (sm, sn, so, sp, 24, 0x00, 0x2D, 0x01, 0x2D); /* 01 A4 02 A4 */
708 CALC_S (sm, sn, so, sp, 25, 0x2D, 0xA4, 0x44, 0x8A); /* A4 56 A1 55 */
709 CALC_S (sm, sn, so, sp, 26, 0x8A, 0xD5, 0xBF, 0xD1); /* 55 82 FC 87 */
710 CALC_S (sm, sn, so, sp, 27, 0xD1, 0x7F, 0x3D, 0x99); /* 87 F3 C1 5A */
711 CALC_S (sm, sn, so, sp, 28, 0x99, 0x46, 0x66, 0x96); /* 5A 1E 47 58 */
712 CALC_S (sm, sn, so, sp, 29, 0x96, 0x3C, 0x5B, 0xED); /* 58 C6 AE DB */
713 CALC_S (sm, sn, so, sp, 30, 0xED, 0x37, 0x4F, 0xE0); /* DB 68 3D 9E */
714 CALC_S (sm, sn, so, sp, 31, 0xE0, 0xD0, 0x8C, 0x17); /* 9E E5 19 03 */
715
716 /* Compute the S-boxes. */
717 for ( i = j = 0, k = 1; i < 256; i++, j += 2, k += 2 ) {
718 CALC_SB256_2( i, calc_sb_tbl[j], calc_sb_tbl[k] );
719 }
720
721 /* Calculate whitening and round subkeys. The constants are
722 * indices of subkeys, preprocessed through q0 and q1. */
723 CALC_K256 (w, 0, 0xA9, 0x75, 0x67, 0xF3);
724 CALC_K256 (w, 2, 0xB3, 0xC6, 0xE8, 0xF4);
725 CALC_K256 (w, 4, 0x04, 0xDB, 0xFD, 0x7B);
726 CALC_K256 (w, 6, 0xA3, 0xFB, 0x76, 0xC8);
727 CALC_K256 (k, 0, 0x9A, 0x4A, 0x92, 0xD3);
728 CALC_K256 (k, 2, 0x80, 0xE6, 0x78, 0x6B);
729 CALC_K256 (k, 4, 0xE4, 0x45, 0xDD, 0x7D);
730 CALC_K256 (k, 6, 0xD1, 0xE8, 0x38, 0x4B);
731 CALC_K256 (k, 8, 0x0D, 0xD6, 0xC6, 0x32);
732 CALC_K256 (k, 10, 0x35, 0xD8, 0x98, 0xFD);
733 CALC_K256 (k, 12, 0x18, 0x37, 0xF7, 0x71);
734 CALC_K256 (k, 14, 0xEC, 0xF1, 0x6C, 0xE1);
735 CALC_K256 (k, 16, 0x43, 0x30, 0x75, 0x0F);
736 CALC_K256 (k, 18, 0x37, 0xF8, 0x26, 0x1B);
737 CALC_K256 (k, 20, 0xFA, 0x87, 0x13, 0xFA);
738 CALC_K256 (k, 22, 0x94, 0x06, 0x48, 0x3F);
739 CALC_K256 (k, 24, 0xF2, 0x5E, 0xD0, 0xBA);
740 CALC_K256 (k, 26, 0x8B, 0xAE, 0x30, 0x5B);
741 CALC_K256 (k, 28, 0x84, 0x8A, 0x54, 0x00);
742 CALC_K256 (k, 30, 0xDF, 0xBC, 0x23, 0x9D);
743 } else if (key_len == 24) { /* 192-bit key */
744 /* Compute the S-boxes. */
745 for ( i = j = 0, k = 1; i < 256; i++, j += 2, k += 2 ) {
746 CALC_SB192_2( i, calc_sb_tbl[j], calc_sb_tbl[k] );
747 }
748
749 /* Calculate whitening and round subkeys. The constants are
750 * indices of subkeys, preprocessed through q0 and q1. */
751 CALC_K192 (w, 0, 0xA9, 0x75, 0x67, 0xF3);
752 CALC_K192 (w, 2, 0xB3, 0xC6, 0xE8, 0xF4);
753 CALC_K192 (w, 4, 0x04, 0xDB, 0xFD, 0x7B);
754 CALC_K192 (w, 6, 0xA3, 0xFB, 0x76, 0xC8);
755 CALC_K192 (k, 0, 0x9A, 0x4A, 0x92, 0xD3);
756 CALC_K192 (k, 2, 0x80, 0xE6, 0x78, 0x6B);
757 CALC_K192 (k, 4, 0xE4, 0x45, 0xDD, 0x7D);
758 CALC_K192 (k, 6, 0xD1, 0xE8, 0x38, 0x4B);
759 CALC_K192 (k, 8, 0x0D, 0xD6, 0xC6, 0x32);
760 CALC_K192 (k, 10, 0x35, 0xD8, 0x98, 0xFD);
761 CALC_K192 (k, 12, 0x18, 0x37, 0xF7, 0x71);
762 CALC_K192 (k, 14, 0xEC, 0xF1, 0x6C, 0xE1);
763 CALC_K192 (k, 16, 0x43, 0x30, 0x75, 0x0F);
764 CALC_K192 (k, 18, 0x37, 0xF8, 0x26, 0x1B);
765 CALC_K192 (k, 20, 0xFA, 0x87, 0x13, 0xFA);
766 CALC_K192 (k, 22, 0x94, 0x06, 0x48, 0x3F);
767 CALC_K192 (k, 24, 0xF2, 0x5E, 0xD0, 0xBA);
768 CALC_K192 (k, 26, 0x8B, 0xAE, 0x30, 0x5B);
769 CALC_K192 (k, 28, 0x84, 0x8A, 0x54, 0x00);
770 CALC_K192 (k, 30, 0xDF, 0xBC, 0x23, 0x9D);
771 } else { /* 128-bit key */
772 /* Compute the S-boxes. */
773 for ( i = j = 0, k = 1; i < 256; i++, j += 2, k += 2 ) {
774 CALC_SB_2( i, calc_sb_tbl[j], calc_sb_tbl[k] );
775 }
776
777 /* Calculate whitening and round subkeys. The constants are
778 * indices of subkeys, preprocessed through q0 and q1. */
779 CALC_K (w, 0, 0xA9, 0x75, 0x67, 0xF3);
780 CALC_K (w, 2, 0xB3, 0xC6, 0xE8, 0xF4);
781 CALC_K (w, 4, 0x04, 0xDB, 0xFD, 0x7B);
782 CALC_K (w, 6, 0xA3, 0xFB, 0x76, 0xC8);
783 CALC_K (k, 0, 0x9A, 0x4A, 0x92, 0xD3);
784 CALC_K (k, 2, 0x80, 0xE6, 0x78, 0x6B);
785 CALC_K (k, 4, 0xE4, 0x45, 0xDD, 0x7D);
786 CALC_K (k, 6, 0xD1, 0xE8, 0x38, 0x4B);
787 CALC_K (k, 8, 0x0D, 0xD6, 0xC6, 0x32);
788 CALC_K (k, 10, 0x35, 0xD8, 0x98, 0xFD);
789 CALC_K (k, 12, 0x18, 0x37, 0xF7, 0x71);
790 CALC_K (k, 14, 0xEC, 0xF1, 0x6C, 0xE1);
791 CALC_K (k, 16, 0x43, 0x30, 0x75, 0x0F);
792 CALC_K (k, 18, 0x37, 0xF8, 0x26, 0x1B);
793 CALC_K (k, 20, 0xFA, 0x87, 0x13, 0xFA);
794 CALC_K (k, 22, 0x94, 0x06, 0x48, 0x3F);
795 CALC_K (k, 24, 0xF2, 0x5E, 0xD0, 0xBA);
796 CALC_K (k, 26, 0x8B, 0xAE, 0x30, 0x5B);
797 CALC_K (k, 28, 0x84, 0x8A, 0x54, 0x00);
798 CALC_K (k, 30, 0xDF, 0xBC, 0x23, 0x9D);
799 }
800
801 return 0;
802}
803 107
804/* Encrypt one block. in and out may be the same. */ 108/* Encrypt one block. in and out may be the same. */
805static void twofish_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) 109static void twofish_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
@@ -877,6 +181,8 @@ static void twofish_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
877 181
878static struct crypto_alg alg = { 182static struct crypto_alg alg = {
879 .cra_name = "twofish", 183 .cra_name = "twofish",
184 .cra_driver_name = "twofish-generic",
185 .cra_priority = 100,
880 .cra_flags = CRYPTO_ALG_TYPE_CIPHER, 186 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
881 .cra_blocksize = TF_BLOCK_SIZE, 187 .cra_blocksize = TF_BLOCK_SIZE,
882 .cra_ctxsize = sizeof(struct twofish_ctx), 188 .cra_ctxsize = sizeof(struct twofish_ctx),
diff --git a/crypto/twofish_common.c b/crypto/twofish_common.c
new file mode 100644
index 000000000000..b4b9c0c3f4ae
--- /dev/null
+++ b/crypto/twofish_common.c
@@ -0,0 +1,744 @@
1/*
2 * Common Twofish algorithm parts shared between the c and assembler
3 * implementations
4 *
5 * Originally Twofish for GPG
6 * By Matthew Skala <mskala@ansuz.sooke.bc.ca>, July 26, 1998
7 * 256-bit key length added March 20, 1999
8 * Some modifications to reduce the text size by Werner Koch, April, 1998
9 * Ported to the kerneli patch by Marc Mutz <Marc@Mutz.com>
10 * Ported to CryptoAPI by Colin Slater <hoho@tacomeat.net>
11 *
12 * The original author has disclaimed all copyright interest in this
13 * code and thus put it in the public domain. The subsequent authors
14 * have put this under the GNU General Public License.
15 *
16 * This program is free software; you can redistribute it and/or modify
17 * it under the terms of the GNU General Public License as published by
18 * the Free Software Foundation; either version 2 of the License, or
19 * (at your option) any later version.
20 *
21 * This program is distributed in the hope that it will be useful,
22 * but WITHOUT ANY WARRANTY; without even the implied warranty of
23 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
24 * GNU General Public License for more details.
25 *
26 * You should have received a copy of the GNU General Public License
27 * along with this program; if not, write to the Free Software
28 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
29 * USA
30 *
31 * This code is a "clean room" implementation, written from the paper
32 * _Twofish: A 128-Bit Block Cipher_ by Bruce Schneier, John Kelsey,
33 * Doug Whiting, David Wagner, Chris Hall, and Niels Ferguson, available
34 * through http://www.counterpane.com/twofish.html
35 *
36 * For background information on multiplication in finite fields, used for
37 * the matrix operations in the key schedule, see the book _Contemporary
38 * Abstract Algebra_ by Joseph A. Gallian, especially chapter 22 in the
39 * Third Edition.
40 */
41
42#include <crypto/twofish.h>
43#include <linux/bitops.h>
44#include <linux/crypto.h>
45#include <linux/errno.h>
46#include <linux/init.h>
47#include <linux/kernel.h>
48#include <linux/module.h>
49#include <linux/types.h>
50
51
52/* The large precomputed tables for the Twofish cipher (twofish.c)
53 * Taken from the same source as twofish.c
54 * Marc Mutz <Marc@Mutz.com>
55 */
56
57/* These two tables are the q0 and q1 permutations, exactly as described in
58 * the Twofish paper. */
59
60static const u8 q0[256] = {
61 0xA9, 0x67, 0xB3, 0xE8, 0x04, 0xFD, 0xA3, 0x76, 0x9A, 0x92, 0x80, 0x78,
62 0xE4, 0xDD, 0xD1, 0x38, 0x0D, 0xC6, 0x35, 0x98, 0x18, 0xF7, 0xEC, 0x6C,
63 0x43, 0x75, 0x37, 0x26, 0xFA, 0x13, 0x94, 0x48, 0xF2, 0xD0, 0x8B, 0x30,
64 0x84, 0x54, 0xDF, 0x23, 0x19, 0x5B, 0x3D, 0x59, 0xF3, 0xAE, 0xA2, 0x82,
65 0x63, 0x01, 0x83, 0x2E, 0xD9, 0x51, 0x9B, 0x7C, 0xA6, 0xEB, 0xA5, 0xBE,
66 0x16, 0x0C, 0xE3, 0x61, 0xC0, 0x8C, 0x3A, 0xF5, 0x73, 0x2C, 0x25, 0x0B,
67 0xBB, 0x4E, 0x89, 0x6B, 0x53, 0x6A, 0xB4, 0xF1, 0xE1, 0xE6, 0xBD, 0x45,
68 0xE2, 0xF4, 0xB6, 0x66, 0xCC, 0x95, 0x03, 0x56, 0xD4, 0x1C, 0x1E, 0xD7,
69 0xFB, 0xC3, 0x8E, 0xB5, 0xE9, 0xCF, 0xBF, 0xBA, 0xEA, 0x77, 0x39, 0xAF,
70 0x33, 0xC9, 0x62, 0x71, 0x81, 0x79, 0x09, 0xAD, 0x24, 0xCD, 0xF9, 0xD8,
71 0xE5, 0xC5, 0xB9, 0x4D, 0x44, 0x08, 0x86, 0xE7, 0xA1, 0x1D, 0xAA, 0xED,
72 0x06, 0x70, 0xB2, 0xD2, 0x41, 0x7B, 0xA0, 0x11, 0x31, 0xC2, 0x27, 0x90,
73 0x20, 0xF6, 0x60, 0xFF, 0x96, 0x5C, 0xB1, 0xAB, 0x9E, 0x9C, 0x52, 0x1B,
74 0x5F, 0x93, 0x0A, 0xEF, 0x91, 0x85, 0x49, 0xEE, 0x2D, 0x4F, 0x8F, 0x3B,
75 0x47, 0x87, 0x6D, 0x46, 0xD6, 0x3E, 0x69, 0x64, 0x2A, 0xCE, 0xCB, 0x2F,
76 0xFC, 0x97, 0x05, 0x7A, 0xAC, 0x7F, 0xD5, 0x1A, 0x4B, 0x0E, 0xA7, 0x5A,
77 0x28, 0x14, 0x3F, 0x29, 0x88, 0x3C, 0x4C, 0x02, 0xB8, 0xDA, 0xB0, 0x17,
78 0x55, 0x1F, 0x8A, 0x7D, 0x57, 0xC7, 0x8D, 0x74, 0xB7, 0xC4, 0x9F, 0x72,
79 0x7E, 0x15, 0x22, 0x12, 0x58, 0x07, 0x99, 0x34, 0x6E, 0x50, 0xDE, 0x68,
80 0x65, 0xBC, 0xDB, 0xF8, 0xC8, 0xA8, 0x2B, 0x40, 0xDC, 0xFE, 0x32, 0xA4,
81 0xCA, 0x10, 0x21, 0xF0, 0xD3, 0x5D, 0x0F, 0x00, 0x6F, 0x9D, 0x36, 0x42,
82 0x4A, 0x5E, 0xC1, 0xE0
83};
84
85static const u8 q1[256] = {
86 0x75, 0xF3, 0xC6, 0xF4, 0xDB, 0x7B, 0xFB, 0xC8, 0x4A, 0xD3, 0xE6, 0x6B,
87 0x45, 0x7D, 0xE8, 0x4B, 0xD6, 0x32, 0xD8, 0xFD, 0x37, 0x71, 0xF1, 0xE1,
88 0x30, 0x0F, 0xF8, 0x1B, 0x87, 0xFA, 0x06, 0x3F, 0x5E, 0xBA, 0xAE, 0x5B,
89 0x8A, 0x00, 0xBC, 0x9D, 0x6D, 0xC1, 0xB1, 0x0E, 0x80, 0x5D, 0xD2, 0xD5,
90 0xA0, 0x84, 0x07, 0x14, 0xB5, 0x90, 0x2C, 0xA3, 0xB2, 0x73, 0x4C, 0x54,
91 0x92, 0x74, 0x36, 0x51, 0x38, 0xB0, 0xBD, 0x5A, 0xFC, 0x60, 0x62, 0x96,
92 0x6C, 0x42, 0xF7, 0x10, 0x7C, 0x28, 0x27, 0x8C, 0x13, 0x95, 0x9C, 0xC7,
93 0x24, 0x46, 0x3B, 0x70, 0xCA, 0xE3, 0x85, 0xCB, 0x11, 0xD0, 0x93, 0xB8,
94 0xA6, 0x83, 0x20, 0xFF, 0x9F, 0x77, 0xC3, 0xCC, 0x03, 0x6F, 0x08, 0xBF,
95 0x40, 0xE7, 0x2B, 0xE2, 0x79, 0x0C, 0xAA, 0x82, 0x41, 0x3A, 0xEA, 0xB9,
96 0xE4, 0x9A, 0xA4, 0x97, 0x7E, 0xDA, 0x7A, 0x17, 0x66, 0x94, 0xA1, 0x1D,
97 0x3D, 0xF0, 0xDE, 0xB3, 0x0B, 0x72, 0xA7, 0x1C, 0xEF, 0xD1, 0x53, 0x3E,
98 0x8F, 0x33, 0x26, 0x5F, 0xEC, 0x76, 0x2A, 0x49, 0x81, 0x88, 0xEE, 0x21,
99 0xC4, 0x1A, 0xEB, 0xD9, 0xC5, 0x39, 0x99, 0xCD, 0xAD, 0x31, 0x8B, 0x01,
100 0x18, 0x23, 0xDD, 0x1F, 0x4E, 0x2D, 0xF9, 0x48, 0x4F, 0xF2, 0x65, 0x8E,
101 0x78, 0x5C, 0x58, 0x19, 0x8D, 0xE5, 0x98, 0x57, 0x67, 0x7F, 0x05, 0x64,
102 0xAF, 0x63, 0xB6, 0xFE, 0xF5, 0xB7, 0x3C, 0xA5, 0xCE, 0xE9, 0x68, 0x44,
103 0xE0, 0x4D, 0x43, 0x69, 0x29, 0x2E, 0xAC, 0x15, 0x59, 0xA8, 0x0A, 0x9E,
104 0x6E, 0x47, 0xDF, 0x34, 0x35, 0x6A, 0xCF, 0xDC, 0x22, 0xC9, 0xC0, 0x9B,
105 0x89, 0xD4, 0xED, 0xAB, 0x12, 0xA2, 0x0D, 0x52, 0xBB, 0x02, 0x2F, 0xA9,
106 0xD7, 0x61, 0x1E, 0xB4, 0x50, 0x04, 0xF6, 0xC2, 0x16, 0x25, 0x86, 0x56,
107 0x55, 0x09, 0xBE, 0x91
108};
109
110/* These MDS tables are actually tables of MDS composed with q0 and q1,
111 * because it is only ever used that way and we can save some time by
112 * precomputing. Of course the main saving comes from precomputing the
113 * GF(2^8) multiplication involved in the MDS matrix multiply; by looking
114 * things up in these tables we reduce the matrix multiply to four lookups
115 * and three XORs. Semi-formally, the definition of these tables is:
116 * mds[0][i] = MDS (q1[i] 0 0 0)^T mds[1][i] = MDS (0 q0[i] 0 0)^T
117 * mds[2][i] = MDS (0 0 q1[i] 0)^T mds[3][i] = MDS (0 0 0 q0[i])^T
118 * where ^T means "transpose", the matrix multiply is performed in GF(2^8)
119 * represented as GF(2)[x]/v(x) where v(x)=x^8+x^6+x^5+x^3+1 as described
120 * by Schneier et al, and I'm casually glossing over the byte/word
121 * conversion issues. */
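To make the "four lookups and three XORs" concrete, here is an illustrative sketch of one MDS-times-column product using the tables defined just below (mds_mix is an invented name, not a function in this file). Because the q0/q1 permutations are already folded into the table entries, indexing by the raw input bytes applies both the permutation and the matrix multiply:

    static u32 mds_mix(u8 b0, u8 b1, u8 b2, u8 b3)
    {
            /* = MDS * (q1[b0], q0[b1], q1[b2], q0[b3])^T, computed in GF(2^8) */
            return mds[0][b0] ^ mds[1][b1] ^ mds[2][b2] ^ mds[3][b3];
    }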
122
123static const u32 mds[4][256] = {
124 {
125 0xBCBC3275, 0xECEC21F3, 0x202043C6, 0xB3B3C9F4, 0xDADA03DB, 0x02028B7B,
126 0xE2E22BFB, 0x9E9EFAC8, 0xC9C9EC4A, 0xD4D409D3, 0x18186BE6, 0x1E1E9F6B,
127 0x98980E45, 0xB2B2387D, 0xA6A6D2E8, 0x2626B74B, 0x3C3C57D6, 0x93938A32,
128 0x8282EED8, 0x525298FD, 0x7B7BD437, 0xBBBB3771, 0x5B5B97F1, 0x474783E1,
129 0x24243C30, 0x5151E20F, 0xBABAC6F8, 0x4A4AF31B, 0xBFBF4887, 0x0D0D70FA,
130 0xB0B0B306, 0x7575DE3F, 0xD2D2FD5E, 0x7D7D20BA, 0x666631AE, 0x3A3AA35B,
131 0x59591C8A, 0x00000000, 0xCDCD93BC, 0x1A1AE09D, 0xAEAE2C6D, 0x7F7FABC1,
132 0x2B2BC7B1, 0xBEBEB90E, 0xE0E0A080, 0x8A8A105D, 0x3B3B52D2, 0x6464BAD5,
133 0xD8D888A0, 0xE7E7A584, 0x5F5FE807, 0x1B1B1114, 0x2C2CC2B5, 0xFCFCB490,
134 0x3131272C, 0x808065A3, 0x73732AB2, 0x0C0C8173, 0x79795F4C, 0x6B6B4154,
135 0x4B4B0292, 0x53536974, 0x94948F36, 0x83831F51, 0x2A2A3638, 0xC4C49CB0,
136 0x2222C8BD, 0xD5D5F85A, 0xBDBDC3FC, 0x48487860, 0xFFFFCE62, 0x4C4C0796,
137 0x4141776C, 0xC7C7E642, 0xEBEB24F7, 0x1C1C1410, 0x5D5D637C, 0x36362228,
138 0x6767C027, 0xE9E9AF8C, 0x4444F913, 0x1414EA95, 0xF5F5BB9C, 0xCFCF18C7,
139 0x3F3F2D24, 0xC0C0E346, 0x7272DB3B, 0x54546C70, 0x29294CCA, 0xF0F035E3,
140 0x0808FE85, 0xC6C617CB, 0xF3F34F11, 0x8C8CE4D0, 0xA4A45993, 0xCACA96B8,
141 0x68683BA6, 0xB8B84D83, 0x38382820, 0xE5E52EFF, 0xADAD569F, 0x0B0B8477,
142 0xC8C81DC3, 0x9999FFCC, 0x5858ED03, 0x19199A6F, 0x0E0E0A08, 0x95957EBF,
143 0x70705040, 0xF7F730E7, 0x6E6ECF2B, 0x1F1F6EE2, 0xB5B53D79, 0x09090F0C,
144 0x616134AA, 0x57571682, 0x9F9F0B41, 0x9D9D803A, 0x111164EA, 0x2525CDB9,
145 0xAFAFDDE4, 0x4545089A, 0xDFDF8DA4, 0xA3A35C97, 0xEAEAD57E, 0x353558DA,
146 0xEDEDD07A, 0x4343FC17, 0xF8F8CB66, 0xFBFBB194, 0x3737D3A1, 0xFAFA401D,
147 0xC2C2683D, 0xB4B4CCF0, 0x32325DDE, 0x9C9C71B3, 0x5656E70B, 0xE3E3DA72,
148 0x878760A7, 0x15151B1C, 0xF9F93AEF, 0x6363BFD1, 0x3434A953, 0x9A9A853E,
149 0xB1B1428F, 0x7C7CD133, 0x88889B26, 0x3D3DA65F, 0xA1A1D7EC, 0xE4E4DF76,
150 0x8181942A, 0x91910149, 0x0F0FFB81, 0xEEEEAA88, 0x161661EE, 0xD7D77321,
151 0x9797F5C4, 0xA5A5A81A, 0xFEFE3FEB, 0x6D6DB5D9, 0x7878AEC5, 0xC5C56D39,
152 0x1D1DE599, 0x7676A4CD, 0x3E3EDCAD, 0xCBCB6731, 0xB6B6478B, 0xEFEF5B01,
153 0x12121E18, 0x6060C523, 0x6A6AB0DD, 0x4D4DF61F, 0xCECEE94E, 0xDEDE7C2D,
154 0x55559DF9, 0x7E7E5A48, 0x2121B24F, 0x03037AF2, 0xA0A02665, 0x5E5E198E,
155 0x5A5A6678, 0x65654B5C, 0x62624E58, 0xFDFD4519, 0x0606F48D, 0x404086E5,
156 0xF2F2BE98, 0x3333AC57, 0x17179067, 0x05058E7F, 0xE8E85E05, 0x4F4F7D64,
157 0x89896AAF, 0x10109563, 0x74742FB6, 0x0A0A75FE, 0x5C5C92F5, 0x9B9B74B7,
158 0x2D2D333C, 0x3030D6A5, 0x2E2E49CE, 0x494989E9, 0x46467268, 0x77775544,
159 0xA8A8D8E0, 0x9696044D, 0x2828BD43, 0xA9A92969, 0xD9D97929, 0x8686912E,
160 0xD1D187AC, 0xF4F44A15, 0x8D8D1559, 0xD6D682A8, 0xB9B9BC0A, 0x42420D9E,
161 0xF6F6C16E, 0x2F2FB847, 0xDDDD06DF, 0x23233934, 0xCCCC6235, 0xF1F1C46A,
162 0xC1C112CF, 0x8585EBDC, 0x8F8F9E22, 0x7171A1C9, 0x9090F0C0, 0xAAAA539B,
163 0x0101F189, 0x8B8BE1D4, 0x4E4E8CED, 0x8E8E6FAB, 0xABABA212, 0x6F6F3EA2,
164 0xE6E6540D, 0xDBDBF252, 0x92927BBB, 0xB7B7B602, 0x6969CA2F, 0x3939D9A9,
165 0xD3D30CD7, 0xA7A72361, 0xA2A2AD1E, 0xC3C399B4, 0x6C6C4450, 0x07070504,
166 0x04047FF6, 0x272746C2, 0xACACA716, 0xD0D07625, 0x50501386, 0xDCDCF756,
167 0x84841A55, 0xE1E15109, 0x7A7A25BE, 0x1313EF91},
168
169 {
170 0xA9D93939, 0x67901717, 0xB3719C9C, 0xE8D2A6A6, 0x04050707, 0xFD985252,
171 0xA3658080, 0x76DFE4E4, 0x9A084545, 0x92024B4B, 0x80A0E0E0, 0x78665A5A,
172 0xE4DDAFAF, 0xDDB06A6A, 0xD1BF6363, 0x38362A2A, 0x0D54E6E6, 0xC6432020,
173 0x3562CCCC, 0x98BEF2F2, 0x181E1212, 0xF724EBEB, 0xECD7A1A1, 0x6C774141,
174 0x43BD2828, 0x7532BCBC, 0x37D47B7B, 0x269B8888, 0xFA700D0D, 0x13F94444,
175 0x94B1FBFB, 0x485A7E7E, 0xF27A0303, 0xD0E48C8C, 0x8B47B6B6, 0x303C2424,
176 0x84A5E7E7, 0x54416B6B, 0xDF06DDDD, 0x23C56060, 0x1945FDFD, 0x5BA33A3A,
177 0x3D68C2C2, 0x59158D8D, 0xF321ECEC, 0xAE316666, 0xA23E6F6F, 0x82165757,
178 0x63951010, 0x015BEFEF, 0x834DB8B8, 0x2E918686, 0xD9B56D6D, 0x511F8383,
179 0x9B53AAAA, 0x7C635D5D, 0xA63B6868, 0xEB3FFEFE, 0xA5D63030, 0xBE257A7A,
180 0x16A7ACAC, 0x0C0F0909, 0xE335F0F0, 0x6123A7A7, 0xC0F09090, 0x8CAFE9E9,
181 0x3A809D9D, 0xF5925C5C, 0x73810C0C, 0x2C273131, 0x2576D0D0, 0x0BE75656,
182 0xBB7B9292, 0x4EE9CECE, 0x89F10101, 0x6B9F1E1E, 0x53A93434, 0x6AC4F1F1,
183 0xB499C3C3, 0xF1975B5B, 0xE1834747, 0xE66B1818, 0xBDC82222, 0x450E9898,
184 0xE26E1F1F, 0xF4C9B3B3, 0xB62F7474, 0x66CBF8F8, 0xCCFF9999, 0x95EA1414,
185 0x03ED5858, 0x56F7DCDC, 0xD4E18B8B, 0x1C1B1515, 0x1EADA2A2, 0xD70CD3D3,
186 0xFB2BE2E2, 0xC31DC8C8, 0x8E195E5E, 0xB5C22C2C, 0xE9894949, 0xCF12C1C1,
187 0xBF7E9595, 0xBA207D7D, 0xEA641111, 0x77840B0B, 0x396DC5C5, 0xAF6A8989,
188 0x33D17C7C, 0xC9A17171, 0x62CEFFFF, 0x7137BBBB, 0x81FB0F0F, 0x793DB5B5,
189 0x0951E1E1, 0xADDC3E3E, 0x242D3F3F, 0xCDA47676, 0xF99D5555, 0xD8EE8282,
190 0xE5864040, 0xC5AE7878, 0xB9CD2525, 0x4D049696, 0x44557777, 0x080A0E0E,
191 0x86135050, 0xE730F7F7, 0xA1D33737, 0x1D40FAFA, 0xAA346161, 0xED8C4E4E,
192 0x06B3B0B0, 0x706C5454, 0xB22A7373, 0xD2523B3B, 0x410B9F9F, 0x7B8B0202,
193 0xA088D8D8, 0x114FF3F3, 0x3167CBCB, 0xC2462727, 0x27C06767, 0x90B4FCFC,
194 0x20283838, 0xF67F0404, 0x60784848, 0xFF2EE5E5, 0x96074C4C, 0x5C4B6565,
195 0xB1C72B2B, 0xAB6F8E8E, 0x9E0D4242, 0x9CBBF5F5, 0x52F2DBDB, 0x1BF34A4A,
196 0x5FA63D3D, 0x9359A4A4, 0x0ABCB9B9, 0xEF3AF9F9, 0x91EF1313, 0x85FE0808,
197 0x49019191, 0xEE611616, 0x2D7CDEDE, 0x4FB22121, 0x8F42B1B1, 0x3BDB7272,
198 0x47B82F2F, 0x8748BFBF, 0x6D2CAEAE, 0x46E3C0C0, 0xD6573C3C, 0x3E859A9A,
199 0x6929A9A9, 0x647D4F4F, 0x2A948181, 0xCE492E2E, 0xCB17C6C6, 0x2FCA6969,
200 0xFCC3BDBD, 0x975CA3A3, 0x055EE8E8, 0x7AD0EDED, 0xAC87D1D1, 0x7F8E0505,
201 0xD5BA6464, 0x1AA8A5A5, 0x4BB72626, 0x0EB9BEBE, 0xA7608787, 0x5AF8D5D5,
202 0x28223636, 0x14111B1B, 0x3FDE7575, 0x2979D9D9, 0x88AAEEEE, 0x3C332D2D,
203 0x4C5F7979, 0x02B6B7B7, 0xB896CACA, 0xDA583535, 0xB09CC4C4, 0x17FC4343,
204 0x551A8484, 0x1FF64D4D, 0x8A1C5959, 0x7D38B2B2, 0x57AC3333, 0xC718CFCF,
205 0x8DF40606, 0x74695353, 0xB7749B9B, 0xC4F59797, 0x9F56ADAD, 0x72DAE3E3,
206 0x7ED5EAEA, 0x154AF4F4, 0x229E8F8F, 0x12A2ABAB, 0x584E6262, 0x07E85F5F,
207 0x99E51D1D, 0x34392323, 0x6EC1F6F6, 0x50446C6C, 0xDE5D3232, 0x68724646,
208 0x6526A0A0, 0xBC93CDCD, 0xDB03DADA, 0xF8C6BABA, 0xC8FA9E9E, 0xA882D6D6,
209 0x2BCF6E6E, 0x40507070, 0xDCEB8585, 0xFE750A0A, 0x328A9393, 0xA48DDFDF,
210 0xCA4C2929, 0x10141C1C, 0x2173D7D7, 0xF0CCB4B4, 0xD309D4D4, 0x5D108A8A,
211 0x0FE25151, 0x00000000, 0x6F9A1919, 0x9DE01A1A, 0x368F9494, 0x42E6C7C7,
212 0x4AECC9C9, 0x5EFDD2D2, 0xC1AB7F7F, 0xE0D8A8A8},
213
214 {
215 0xBC75BC32, 0xECF3EC21, 0x20C62043, 0xB3F4B3C9, 0xDADBDA03, 0x027B028B,
216 0xE2FBE22B, 0x9EC89EFA, 0xC94AC9EC, 0xD4D3D409, 0x18E6186B, 0x1E6B1E9F,
217 0x9845980E, 0xB27DB238, 0xA6E8A6D2, 0x264B26B7, 0x3CD63C57, 0x9332938A,
218 0x82D882EE, 0x52FD5298, 0x7B377BD4, 0xBB71BB37, 0x5BF15B97, 0x47E14783,
219 0x2430243C, 0x510F51E2, 0xBAF8BAC6, 0x4A1B4AF3, 0xBF87BF48, 0x0DFA0D70,
220 0xB006B0B3, 0x753F75DE, 0xD25ED2FD, 0x7DBA7D20, 0x66AE6631, 0x3A5B3AA3,
221 0x598A591C, 0x00000000, 0xCDBCCD93, 0x1A9D1AE0, 0xAE6DAE2C, 0x7FC17FAB,
222 0x2BB12BC7, 0xBE0EBEB9, 0xE080E0A0, 0x8A5D8A10, 0x3BD23B52, 0x64D564BA,
223 0xD8A0D888, 0xE784E7A5, 0x5F075FE8, 0x1B141B11, 0x2CB52CC2, 0xFC90FCB4,
224 0x312C3127, 0x80A38065, 0x73B2732A, 0x0C730C81, 0x794C795F, 0x6B546B41,
225 0x4B924B02, 0x53745369, 0x9436948F, 0x8351831F, 0x2A382A36, 0xC4B0C49C,
226 0x22BD22C8, 0xD55AD5F8, 0xBDFCBDC3, 0x48604878, 0xFF62FFCE, 0x4C964C07,
227 0x416C4177, 0xC742C7E6, 0xEBF7EB24, 0x1C101C14, 0x5D7C5D63, 0x36283622,
228 0x672767C0, 0xE98CE9AF, 0x441344F9, 0x149514EA, 0xF59CF5BB, 0xCFC7CF18,
229 0x3F243F2D, 0xC046C0E3, 0x723B72DB, 0x5470546C, 0x29CA294C, 0xF0E3F035,
230 0x088508FE, 0xC6CBC617, 0xF311F34F, 0x8CD08CE4, 0xA493A459, 0xCAB8CA96,
231 0x68A6683B, 0xB883B84D, 0x38203828, 0xE5FFE52E, 0xAD9FAD56, 0x0B770B84,
232 0xC8C3C81D, 0x99CC99FF, 0x580358ED, 0x196F199A, 0x0E080E0A, 0x95BF957E,
233 0x70407050, 0xF7E7F730, 0x6E2B6ECF, 0x1FE21F6E, 0xB579B53D, 0x090C090F,
234 0x61AA6134, 0x57825716, 0x9F419F0B, 0x9D3A9D80, 0x11EA1164, 0x25B925CD,
235 0xAFE4AFDD, 0x459A4508, 0xDFA4DF8D, 0xA397A35C, 0xEA7EEAD5, 0x35DA3558,
236 0xED7AEDD0, 0x431743FC, 0xF866F8CB, 0xFB94FBB1, 0x37A137D3, 0xFA1DFA40,
237 0xC23DC268, 0xB4F0B4CC, 0x32DE325D, 0x9CB39C71, 0x560B56E7, 0xE372E3DA,
238 0x87A78760, 0x151C151B, 0xF9EFF93A, 0x63D163BF, 0x345334A9, 0x9A3E9A85,
239 0xB18FB142, 0x7C337CD1, 0x8826889B, 0x3D5F3DA6, 0xA1ECA1D7, 0xE476E4DF,
240 0x812A8194, 0x91499101, 0x0F810FFB, 0xEE88EEAA, 0x16EE1661, 0xD721D773,
241 0x97C497F5, 0xA51AA5A8, 0xFEEBFE3F, 0x6DD96DB5, 0x78C578AE, 0xC539C56D,
242 0x1D991DE5, 0x76CD76A4, 0x3EAD3EDC, 0xCB31CB67, 0xB68BB647, 0xEF01EF5B,
243 0x1218121E, 0x602360C5, 0x6ADD6AB0, 0x4D1F4DF6, 0xCE4ECEE9, 0xDE2DDE7C,
244 0x55F9559D, 0x7E487E5A, 0x214F21B2, 0x03F2037A, 0xA065A026, 0x5E8E5E19,
245 0x5A785A66, 0x655C654B, 0x6258624E, 0xFD19FD45, 0x068D06F4, 0x40E54086,
246 0xF298F2BE, 0x335733AC, 0x17671790, 0x057F058E, 0xE805E85E, 0x4F644F7D,
247 0x89AF896A, 0x10631095, 0x74B6742F, 0x0AFE0A75, 0x5CF55C92, 0x9BB79B74,
248 0x2D3C2D33, 0x30A530D6, 0x2ECE2E49, 0x49E94989, 0x46684672, 0x77447755,
249 0xA8E0A8D8, 0x964D9604, 0x284328BD, 0xA969A929, 0xD929D979, 0x862E8691,
250 0xD1ACD187, 0xF415F44A, 0x8D598D15, 0xD6A8D682, 0xB90AB9BC, 0x429E420D,
251 0xF66EF6C1, 0x2F472FB8, 0xDDDFDD06, 0x23342339, 0xCC35CC62, 0xF16AF1C4,
252 0xC1CFC112, 0x85DC85EB, 0x8F228F9E, 0x71C971A1, 0x90C090F0, 0xAA9BAA53,
253 0x018901F1, 0x8BD48BE1, 0x4EED4E8C, 0x8EAB8E6F, 0xAB12ABA2, 0x6FA26F3E,
254 0xE60DE654, 0xDB52DBF2, 0x92BB927B, 0xB702B7B6, 0x692F69CA, 0x39A939D9,
255 0xD3D7D30C, 0xA761A723, 0xA21EA2AD, 0xC3B4C399, 0x6C506C44, 0x07040705,
256 0x04F6047F, 0x27C22746, 0xAC16ACA7, 0xD025D076, 0x50865013, 0xDC56DCF7,
257 0x8455841A, 0xE109E151, 0x7ABE7A25, 0x139113EF},
258
259 {
260 0xD939A9D9, 0x90176790, 0x719CB371, 0xD2A6E8D2, 0x05070405, 0x9852FD98,
261 0x6580A365, 0xDFE476DF, 0x08459A08, 0x024B9202, 0xA0E080A0, 0x665A7866,
262 0xDDAFE4DD, 0xB06ADDB0, 0xBF63D1BF, 0x362A3836, 0x54E60D54, 0x4320C643,
263 0x62CC3562, 0xBEF298BE, 0x1E12181E, 0x24EBF724, 0xD7A1ECD7, 0x77416C77,
264 0xBD2843BD, 0x32BC7532, 0xD47B37D4, 0x9B88269B, 0x700DFA70, 0xF94413F9,
265 0xB1FB94B1, 0x5A7E485A, 0x7A03F27A, 0xE48CD0E4, 0x47B68B47, 0x3C24303C,
266 0xA5E784A5, 0x416B5441, 0x06DDDF06, 0xC56023C5, 0x45FD1945, 0xA33A5BA3,
267 0x68C23D68, 0x158D5915, 0x21ECF321, 0x3166AE31, 0x3E6FA23E, 0x16578216,
268 0x95106395, 0x5BEF015B, 0x4DB8834D, 0x91862E91, 0xB56DD9B5, 0x1F83511F,
269 0x53AA9B53, 0x635D7C63, 0x3B68A63B, 0x3FFEEB3F, 0xD630A5D6, 0x257ABE25,
270 0xA7AC16A7, 0x0F090C0F, 0x35F0E335, 0x23A76123, 0xF090C0F0, 0xAFE98CAF,
271 0x809D3A80, 0x925CF592, 0x810C7381, 0x27312C27, 0x76D02576, 0xE7560BE7,
272 0x7B92BB7B, 0xE9CE4EE9, 0xF10189F1, 0x9F1E6B9F, 0xA93453A9, 0xC4F16AC4,
273 0x99C3B499, 0x975BF197, 0x8347E183, 0x6B18E66B, 0xC822BDC8, 0x0E98450E,
274 0x6E1FE26E, 0xC9B3F4C9, 0x2F74B62F, 0xCBF866CB, 0xFF99CCFF, 0xEA1495EA,
275 0xED5803ED, 0xF7DC56F7, 0xE18BD4E1, 0x1B151C1B, 0xADA21EAD, 0x0CD3D70C,
276 0x2BE2FB2B, 0x1DC8C31D, 0x195E8E19, 0xC22CB5C2, 0x8949E989, 0x12C1CF12,
277 0x7E95BF7E, 0x207DBA20, 0x6411EA64, 0x840B7784, 0x6DC5396D, 0x6A89AF6A,
278 0xD17C33D1, 0xA171C9A1, 0xCEFF62CE, 0x37BB7137, 0xFB0F81FB, 0x3DB5793D,
279 0x51E10951, 0xDC3EADDC, 0x2D3F242D, 0xA476CDA4, 0x9D55F99D, 0xEE82D8EE,
280 0x8640E586, 0xAE78C5AE, 0xCD25B9CD, 0x04964D04, 0x55774455, 0x0A0E080A,
281 0x13508613, 0x30F7E730, 0xD337A1D3, 0x40FA1D40, 0x3461AA34, 0x8C4EED8C,
282 0xB3B006B3, 0x6C54706C, 0x2A73B22A, 0x523BD252, 0x0B9F410B, 0x8B027B8B,
283 0x88D8A088, 0x4FF3114F, 0x67CB3167, 0x4627C246, 0xC06727C0, 0xB4FC90B4,
284 0x28382028, 0x7F04F67F, 0x78486078, 0x2EE5FF2E, 0x074C9607, 0x4B655C4B,
285 0xC72BB1C7, 0x6F8EAB6F, 0x0D429E0D, 0xBBF59CBB, 0xF2DB52F2, 0xF34A1BF3,
286 0xA63D5FA6, 0x59A49359, 0xBCB90ABC, 0x3AF9EF3A, 0xEF1391EF, 0xFE0885FE,
287 0x01914901, 0x6116EE61, 0x7CDE2D7C, 0xB2214FB2, 0x42B18F42, 0xDB723BDB,
288 0xB82F47B8, 0x48BF8748, 0x2CAE6D2C, 0xE3C046E3, 0x573CD657, 0x859A3E85,
289 0x29A96929, 0x7D4F647D, 0x94812A94, 0x492ECE49, 0x17C6CB17, 0xCA692FCA,
290 0xC3BDFCC3, 0x5CA3975C, 0x5EE8055E, 0xD0ED7AD0, 0x87D1AC87, 0x8E057F8E,
291 0xBA64D5BA, 0xA8A51AA8, 0xB7264BB7, 0xB9BE0EB9, 0x6087A760, 0xF8D55AF8,
292 0x22362822, 0x111B1411, 0xDE753FDE, 0x79D92979, 0xAAEE88AA, 0x332D3C33,
293 0x5F794C5F, 0xB6B702B6, 0x96CAB896, 0x5835DA58, 0x9CC4B09C, 0xFC4317FC,
294 0x1A84551A, 0xF64D1FF6, 0x1C598A1C, 0x38B27D38, 0xAC3357AC, 0x18CFC718,
295 0xF4068DF4, 0x69537469, 0x749BB774, 0xF597C4F5, 0x56AD9F56, 0xDAE372DA,
296 0xD5EA7ED5, 0x4AF4154A, 0x9E8F229E, 0xA2AB12A2, 0x4E62584E, 0xE85F07E8,
297 0xE51D99E5, 0x39233439, 0xC1F66EC1, 0x446C5044, 0x5D32DE5D, 0x72466872,
298 0x26A06526, 0x93CDBC93, 0x03DADB03, 0xC6BAF8C6, 0xFA9EC8FA, 0x82D6A882,
299 0xCF6E2BCF, 0x50704050, 0xEB85DCEB, 0x750AFE75, 0x8A93328A, 0x8DDFA48D,
300 0x4C29CA4C, 0x141C1014, 0x73D72173, 0xCCB4F0CC, 0x09D4D309, 0x108A5D10,
301 0xE2510FE2, 0x00000000, 0x9A196F9A, 0xE01A9DE0, 0x8F94368F, 0xE6C742E6,
302 0xECC94AEC, 0xFDD25EFD, 0xAB7FC1AB, 0xD8A8E0D8}
303};
304
305/* The exp_to_poly and poly_to_exp tables are used to perform efficient
306 * operations in GF(2^8) represented as GF(2)[x]/w(x) where
307 * w(x)=x^8+x^6+x^3+x^2+1. We care about doing that because it's part of the
308 * definition of the RS matrix in the key schedule. Elements of that field
309 * are polynomials of degree not greater than 7 and all coefficients 0 or 1,
310 * which can be represented naturally by bytes (just substitute x=2). In that
311 * form, GF(2^8) addition is the same as bitwise XOR, but GF(2^8)
312 * multiplication is inefficient without hardware support. To multiply
313 * faster, I make use of the fact x is a generator for the nonzero elements,
314 * so that every element p of GF(2)[x]/w(x) is either 0 or equal to (x)^n for
315 * some n in 0..254. Note that that caret is exponentiation in GF(2^8),
316 * *not* polynomial notation. So if I want to compute pq where p and q are
317 * in GF(2^8), I can just say:
318 * 1. if p=0 or q=0 then pq=0
319 * 2. otherwise, find m and n such that p=x^m and q=x^n
320 * 3. pq=(x^m)(x^n)=x^(m+n), so add m and n and find pq
321 * The translations in steps 2 and 3 are looked up in the tables
322 * poly_to_exp (for step 2) and exp_to_poly (for step 3). To see this
323 * in action, look at the CALC_S macro. As additional wrinkles, note that
324 * one of my operands is always a constant, so the poly_to_exp lookup on it
325 * is done in advance; I included the original values in the comments so
326 * readers can have some chance of recognizing that this *is* the RS matrix
327 * from the Twofish paper. I've only included the table entries I actually
328 * need; I never do a lookup on a variable input of zero and the biggest
329 * exponents I'll ever see are 254 (variable) and 237 (constant), so they'll
330 * never sum to more than 491. I'm repeating part of the exp_to_poly table
331 * so that I don't have to do mod-255 reduction in the exponent arithmetic.
332 * Since I know my constant operands are never zero, I only have to worry
333 * about zero values in the variable operand, and I do it with a simple
334 * conditional branch. I know conditionals are expensive, but I couldn't
335 * see a non-horrible way of avoiding them, and I did manage to group the
336 * statements so that each if covers four group multiplications. */
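As a sketch of the multiply described above (gf_mul is an illustrative name, not something defined in this file), the two tables that follow give, for arbitrary operands:

    static u8 gf_mul(u8 p, u8 q)
    {
            if (p == 0 || q == 0)
                    return 0;
            /* poly_to_exp[v - 1] is the exponent n with v = x^n; the mod-255
             * reduction keeps the index in the basic 0..254 antilog range.
             * CALC_S below can skip the "% 255" because exp_to_poly is
             * oversized and its constant exponents never exceed 237. */
            return exp_to_poly[(poly_to_exp[p - 1] + poly_to_exp[q - 1]) % 255];
    }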
337
338static const u8 poly_to_exp[255] = {
339 0x00, 0x01, 0x17, 0x02, 0x2E, 0x18, 0x53, 0x03, 0x6A, 0x2F, 0x93, 0x19,
340 0x34, 0x54, 0x45, 0x04, 0x5C, 0x6B, 0xB6, 0x30, 0xA6, 0x94, 0x4B, 0x1A,
341 0x8C, 0x35, 0x81, 0x55, 0xAA, 0x46, 0x0D, 0x05, 0x24, 0x5D, 0x87, 0x6C,
342 0x9B, 0xB7, 0xC1, 0x31, 0x2B, 0xA7, 0xA3, 0x95, 0x98, 0x4C, 0xCA, 0x1B,
343 0xE6, 0x8D, 0x73, 0x36, 0xCD, 0x82, 0x12, 0x56, 0x62, 0xAB, 0xF0, 0x47,
344 0x4F, 0x0E, 0xBD, 0x06, 0xD4, 0x25, 0xD2, 0x5E, 0x27, 0x88, 0x66, 0x6D,
345 0xD6, 0x9C, 0x79, 0xB8, 0x08, 0xC2, 0xDF, 0x32, 0x68, 0x2C, 0xFD, 0xA8,
346 0x8A, 0xA4, 0x5A, 0x96, 0x29, 0x99, 0x22, 0x4D, 0x60, 0xCB, 0xE4, 0x1C,
347 0x7B, 0xE7, 0x3B, 0x8E, 0x9E, 0x74, 0xF4, 0x37, 0xD8, 0xCE, 0xF9, 0x83,
348 0x6F, 0x13, 0xB2, 0x57, 0xE1, 0x63, 0xDC, 0xAC, 0xC4, 0xF1, 0xAF, 0x48,
349 0x0A, 0x50, 0x42, 0x0F, 0xBA, 0xBE, 0xC7, 0x07, 0xDE, 0xD5, 0x78, 0x26,
350 0x65, 0xD3, 0xD1, 0x5F, 0xE3, 0x28, 0x21, 0x89, 0x59, 0x67, 0xFC, 0x6E,
351 0xB1, 0xD7, 0xF8, 0x9D, 0xF3, 0x7A, 0x3A, 0xB9, 0xC6, 0x09, 0x41, 0xC3,
352 0xAE, 0xE0, 0xDB, 0x33, 0x44, 0x69, 0x92, 0x2D, 0x52, 0xFE, 0x16, 0xA9,
353 0x0C, 0x8B, 0x80, 0xA5, 0x4A, 0x5B, 0xB5, 0x97, 0xC9, 0x2A, 0xA2, 0x9A,
354 0xC0, 0x23, 0x86, 0x4E, 0xBC, 0x61, 0xEF, 0xCC, 0x11, 0xE5, 0x72, 0x1D,
355 0x3D, 0x7C, 0xEB, 0xE8, 0xE9, 0x3C, 0xEA, 0x8F, 0x7D, 0x9F, 0xEC, 0x75,
356 0x1E, 0xF5, 0x3E, 0x38, 0xF6, 0xD9, 0x3F, 0xCF, 0x76, 0xFA, 0x1F, 0x84,
357 0xA0, 0x70, 0xED, 0x14, 0x90, 0xB3, 0x7E, 0x58, 0xFB, 0xE2, 0x20, 0x64,
358 0xD0, 0xDD, 0x77, 0xAD, 0xDA, 0xC5, 0x40, 0xF2, 0x39, 0xB0, 0xF7, 0x49,
359 0xB4, 0x0B, 0x7F, 0x51, 0x15, 0x43, 0x91, 0x10, 0x71, 0xBB, 0xEE, 0xBF,
360 0x85, 0xC8, 0xA1
361};
362
363static const u8 exp_to_poly[492] = {
364 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x4D, 0x9A, 0x79, 0xF2,
365 0xA9, 0x1F, 0x3E, 0x7C, 0xF8, 0xBD, 0x37, 0x6E, 0xDC, 0xF5, 0xA7, 0x03,
366 0x06, 0x0C, 0x18, 0x30, 0x60, 0xC0, 0xCD, 0xD7, 0xE3, 0x8B, 0x5B, 0xB6,
367 0x21, 0x42, 0x84, 0x45, 0x8A, 0x59, 0xB2, 0x29, 0x52, 0xA4, 0x05, 0x0A,
368 0x14, 0x28, 0x50, 0xA0, 0x0D, 0x1A, 0x34, 0x68, 0xD0, 0xED, 0x97, 0x63,
369 0xC6, 0xC1, 0xCF, 0xD3, 0xEB, 0x9B, 0x7B, 0xF6, 0xA1, 0x0F, 0x1E, 0x3C,
370 0x78, 0xF0, 0xAD, 0x17, 0x2E, 0x5C, 0xB8, 0x3D, 0x7A, 0xF4, 0xA5, 0x07,
371 0x0E, 0x1C, 0x38, 0x70, 0xE0, 0x8D, 0x57, 0xAE, 0x11, 0x22, 0x44, 0x88,
372 0x5D, 0xBA, 0x39, 0x72, 0xE4, 0x85, 0x47, 0x8E, 0x51, 0xA2, 0x09, 0x12,
373 0x24, 0x48, 0x90, 0x6D, 0xDA, 0xF9, 0xBF, 0x33, 0x66, 0xCC, 0xD5, 0xE7,
374 0x83, 0x4B, 0x96, 0x61, 0xC2, 0xC9, 0xDF, 0xF3, 0xAB, 0x1B, 0x36, 0x6C,
375 0xD8, 0xFD, 0xB7, 0x23, 0x46, 0x8C, 0x55, 0xAA, 0x19, 0x32, 0x64, 0xC8,
376 0xDD, 0xF7, 0xA3, 0x0B, 0x16, 0x2C, 0x58, 0xB0, 0x2D, 0x5A, 0xB4, 0x25,
377 0x4A, 0x94, 0x65, 0xCA, 0xD9, 0xFF, 0xB3, 0x2B, 0x56, 0xAC, 0x15, 0x2A,
378 0x54, 0xA8, 0x1D, 0x3A, 0x74, 0xE8, 0x9D, 0x77, 0xEE, 0x91, 0x6F, 0xDE,
379 0xF1, 0xAF, 0x13, 0x26, 0x4C, 0x98, 0x7D, 0xFA, 0xB9, 0x3F, 0x7E, 0xFC,
380 0xB5, 0x27, 0x4E, 0x9C, 0x75, 0xEA, 0x99, 0x7F, 0xFE, 0xB1, 0x2F, 0x5E,
381 0xBC, 0x35, 0x6A, 0xD4, 0xE5, 0x87, 0x43, 0x86, 0x41, 0x82, 0x49, 0x92,
382 0x69, 0xD2, 0xE9, 0x9F, 0x73, 0xE6, 0x81, 0x4F, 0x9E, 0x71, 0xE2, 0x89,
383 0x5F, 0xBE, 0x31, 0x62, 0xC4, 0xC5, 0xC7, 0xC3, 0xCB, 0xDB, 0xFB, 0xBB,
384 0x3B, 0x76, 0xEC, 0x95, 0x67, 0xCE, 0xD1, 0xEF, 0x93, 0x6B, 0xD6, 0xE1,
385 0x8F, 0x53, 0xA6, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x4D,
386 0x9A, 0x79, 0xF2, 0xA9, 0x1F, 0x3E, 0x7C, 0xF8, 0xBD, 0x37, 0x6E, 0xDC,
387 0xF5, 0xA7, 0x03, 0x06, 0x0C, 0x18, 0x30, 0x60, 0xC0, 0xCD, 0xD7, 0xE3,
388 0x8B, 0x5B, 0xB6, 0x21, 0x42, 0x84, 0x45, 0x8A, 0x59, 0xB2, 0x29, 0x52,
389 0xA4, 0x05, 0x0A, 0x14, 0x28, 0x50, 0xA0, 0x0D, 0x1A, 0x34, 0x68, 0xD0,
390 0xED, 0x97, 0x63, 0xC6, 0xC1, 0xCF, 0xD3, 0xEB, 0x9B, 0x7B, 0xF6, 0xA1,
391 0x0F, 0x1E, 0x3C, 0x78, 0xF0, 0xAD, 0x17, 0x2E, 0x5C, 0xB8, 0x3D, 0x7A,
392 0xF4, 0xA5, 0x07, 0x0E, 0x1C, 0x38, 0x70, 0xE0, 0x8D, 0x57, 0xAE, 0x11,
393 0x22, 0x44, 0x88, 0x5D, 0xBA, 0x39, 0x72, 0xE4, 0x85, 0x47, 0x8E, 0x51,
394 0xA2, 0x09, 0x12, 0x24, 0x48, 0x90, 0x6D, 0xDA, 0xF9, 0xBF, 0x33, 0x66,
395 0xCC, 0xD5, 0xE7, 0x83, 0x4B, 0x96, 0x61, 0xC2, 0xC9, 0xDF, 0xF3, 0xAB,
396 0x1B, 0x36, 0x6C, 0xD8, 0xFD, 0xB7, 0x23, 0x46, 0x8C, 0x55, 0xAA, 0x19,
397 0x32, 0x64, 0xC8, 0xDD, 0xF7, 0xA3, 0x0B, 0x16, 0x2C, 0x58, 0xB0, 0x2D,
398 0x5A, 0xB4, 0x25, 0x4A, 0x94, 0x65, 0xCA, 0xD9, 0xFF, 0xB3, 0x2B, 0x56,
399 0xAC, 0x15, 0x2A, 0x54, 0xA8, 0x1D, 0x3A, 0x74, 0xE8, 0x9D, 0x77, 0xEE,
400 0x91, 0x6F, 0xDE, 0xF1, 0xAF, 0x13, 0x26, 0x4C, 0x98, 0x7D, 0xFA, 0xB9,
401 0x3F, 0x7E, 0xFC, 0xB5, 0x27, 0x4E, 0x9C, 0x75, 0xEA, 0x99, 0x7F, 0xFE,
402 0xB1, 0x2F, 0x5E, 0xBC, 0x35, 0x6A, 0xD4, 0xE5, 0x87, 0x43, 0x86, 0x41,
403 0x82, 0x49, 0x92, 0x69, 0xD2, 0xE9, 0x9F, 0x73, 0xE6, 0x81, 0x4F, 0x9E,
404 0x71, 0xE2, 0x89, 0x5F, 0xBE, 0x31, 0x62, 0xC4, 0xC5, 0xC7, 0xC3, 0xCB
405};
406
407
408/* The table constants are indices of
409 * S-box entries, preprocessed through q0 and q1. */
410static const u8 calc_sb_tbl[512] = {
411 0xA9, 0x75, 0x67, 0xF3, 0xB3, 0xC6, 0xE8, 0xF4,
412 0x04, 0xDB, 0xFD, 0x7B, 0xA3, 0xFB, 0x76, 0xC8,
413 0x9A, 0x4A, 0x92, 0xD3, 0x80, 0xE6, 0x78, 0x6B,
414 0xE4, 0x45, 0xDD, 0x7D, 0xD1, 0xE8, 0x38, 0x4B,
415 0x0D, 0xD6, 0xC6, 0x32, 0x35, 0xD8, 0x98, 0xFD,
416 0x18, 0x37, 0xF7, 0x71, 0xEC, 0xF1, 0x6C, 0xE1,
417 0x43, 0x30, 0x75, 0x0F, 0x37, 0xF8, 0x26, 0x1B,
418 0xFA, 0x87, 0x13, 0xFA, 0x94, 0x06, 0x48, 0x3F,
419 0xF2, 0x5E, 0xD0, 0xBA, 0x8B, 0xAE, 0x30, 0x5B,
420 0x84, 0x8A, 0x54, 0x00, 0xDF, 0xBC, 0x23, 0x9D,
421 0x19, 0x6D, 0x5B, 0xC1, 0x3D, 0xB1, 0x59, 0x0E,
422 0xF3, 0x80, 0xAE, 0x5D, 0xA2, 0xD2, 0x82, 0xD5,
423 0x63, 0xA0, 0x01, 0x84, 0x83, 0x07, 0x2E, 0x14,
424 0xD9, 0xB5, 0x51, 0x90, 0x9B, 0x2C, 0x7C, 0xA3,
425 0xA6, 0xB2, 0xEB, 0x73, 0xA5, 0x4C, 0xBE, 0x54,
426 0x16, 0x92, 0x0C, 0x74, 0xE3, 0x36, 0x61, 0x51,
427 0xC0, 0x38, 0x8C, 0xB0, 0x3A, 0xBD, 0xF5, 0x5A,
428 0x73, 0xFC, 0x2C, 0x60, 0x25, 0x62, 0x0B, 0x96,
429 0xBB, 0x6C, 0x4E, 0x42, 0x89, 0xF7, 0x6B, 0x10,
430 0x53, 0x7C, 0x6A, 0x28, 0xB4, 0x27, 0xF1, 0x8C,
431 0xE1, 0x13, 0xE6, 0x95, 0xBD, 0x9C, 0x45, 0xC7,
432 0xE2, 0x24, 0xF4, 0x46, 0xB6, 0x3B, 0x66, 0x70,
433 0xCC, 0xCA, 0x95, 0xE3, 0x03, 0x85, 0x56, 0xCB,
434 0xD4, 0x11, 0x1C, 0xD0, 0x1E, 0x93, 0xD7, 0xB8,
435 0xFB, 0xA6, 0xC3, 0x83, 0x8E, 0x20, 0xB5, 0xFF,
436 0xE9, 0x9F, 0xCF, 0x77, 0xBF, 0xC3, 0xBA, 0xCC,
437 0xEA, 0x03, 0x77, 0x6F, 0x39, 0x08, 0xAF, 0xBF,
438 0x33, 0x40, 0xC9, 0xE7, 0x62, 0x2B, 0x71, 0xE2,
439 0x81, 0x79, 0x79, 0x0C, 0x09, 0xAA, 0xAD, 0x82,
440 0x24, 0x41, 0xCD, 0x3A, 0xF9, 0xEA, 0xD8, 0xB9,
441 0xE5, 0xE4, 0xC5, 0x9A, 0xB9, 0xA4, 0x4D, 0x97,
442 0x44, 0x7E, 0x08, 0xDA, 0x86, 0x7A, 0xE7, 0x17,
443 0xA1, 0x66, 0x1D, 0x94, 0xAA, 0xA1, 0xED, 0x1D,
444 0x06, 0x3D, 0x70, 0xF0, 0xB2, 0xDE, 0xD2, 0xB3,
445 0x41, 0x0B, 0x7B, 0x72, 0xA0, 0xA7, 0x11, 0x1C,
446 0x31, 0xEF, 0xC2, 0xD1, 0x27, 0x53, 0x90, 0x3E,
447 0x20, 0x8F, 0xF6, 0x33, 0x60, 0x26, 0xFF, 0x5F,
448 0x96, 0xEC, 0x5C, 0x76, 0xB1, 0x2A, 0xAB, 0x49,
449 0x9E, 0x81, 0x9C, 0x88, 0x52, 0xEE, 0x1B, 0x21,
450 0x5F, 0xC4, 0x93, 0x1A, 0x0A, 0xEB, 0xEF, 0xD9,
451 0x91, 0xC5, 0x85, 0x39, 0x49, 0x99, 0xEE, 0xCD,
452 0x2D, 0xAD, 0x4F, 0x31, 0x8F, 0x8B, 0x3B, 0x01,
453 0x47, 0x18, 0x87, 0x23, 0x6D, 0xDD, 0x46, 0x1F,
454 0xD6, 0x4E, 0x3E, 0x2D, 0x69, 0xF9, 0x64, 0x48,
455 0x2A, 0x4F, 0xCE, 0xF2, 0xCB, 0x65, 0x2F, 0x8E,
456 0xFC, 0x78, 0x97, 0x5C, 0x05, 0x58, 0x7A, 0x19,
457 0xAC, 0x8D, 0x7F, 0xE5, 0xD5, 0x98, 0x1A, 0x57,
458 0x4B, 0x67, 0x0E, 0x7F, 0xA7, 0x05, 0x5A, 0x64,
459 0x28, 0xAF, 0x14, 0x63, 0x3F, 0xB6, 0x29, 0xFE,
460 0x88, 0xF5, 0x3C, 0xB7, 0x4C, 0x3C, 0x02, 0xA5,
461 0xB8, 0xCE, 0xDA, 0xE9, 0xB0, 0x68, 0x17, 0x44,
462 0x55, 0xE0, 0x1F, 0x4D, 0x8A, 0x43, 0x7D, 0x69,
463 0x57, 0x29, 0xC7, 0x2E, 0x8D, 0xAC, 0x74, 0x15,
464 0xB7, 0x59, 0xC4, 0xA8, 0x9F, 0x0A, 0x72, 0x9E,
465 0x7E, 0x6E, 0x15, 0x47, 0x22, 0xDF, 0x12, 0x34,
466 0x58, 0x35, 0x07, 0x6A, 0x99, 0xCF, 0x34, 0xDC,
467 0x6E, 0x22, 0x50, 0xC9, 0xDE, 0xC0, 0x68, 0x9B,
468 0x65, 0x89, 0xBC, 0xD4, 0xDB, 0xED, 0xF8, 0xAB,
469 0xC8, 0x12, 0xA8, 0xA2, 0x2B, 0x0D, 0x40, 0x52,
470 0xDC, 0xBB, 0xFE, 0x02, 0x32, 0x2F, 0xA4, 0xA9,
471 0xCA, 0xD7, 0x10, 0x61, 0x21, 0x1E, 0xF0, 0xB4,
472 0xD3, 0x50, 0x5D, 0x04, 0x0F, 0xF6, 0x00, 0xC2,
473 0x6F, 0x16, 0x9D, 0x25, 0x36, 0x86, 0x42, 0x56,
474 0x4A, 0x55, 0x5E, 0x09, 0xC1, 0xBE, 0xE0, 0x91
475};
476
477/* Macro to perform one column of the RS matrix multiplication. The
478 * parameters a, b, c, and d are the four bytes of output; i is the index
479 * of the key bytes, and w, x, y, and z, are the column of constants from
480 * the RS matrix, preprocessed through the poly_to_exp table. */
481
482#define CALC_S(a, b, c, d, i, w, x, y, z) \
483 if (key[i]) { \
484 tmp = poly_to_exp[key[i] - 1]; \
485 (a) ^= exp_to_poly[tmp + (w)]; \
486 (b) ^= exp_to_poly[tmp + (x)]; \
487 (c) ^= exp_to_poly[tmp + (y)]; \
488 (d) ^= exp_to_poly[tmp + (z)]; \
489 }
490
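Unrolled, a single CALC_S invocation is equivalent to XOR-accumulating one key byte times one column of the RS matrix. In terms of the gf_mul() sketch above, with W, X, Y, Z being the original polynomial-form constants whose logarithms w, x, y, z are passed to the macro (rs_column is a hypothetical helper, shown only for orientation):

    static void rs_column(u8 *a, u8 *b, u8 *c, u8 *d, u8 kbyte,
                          u8 W, u8 X, u8 Y, u8 Z)
    {
            *a ^= gf_mul(kbyte, W);
            *b ^= gf_mul(kbyte, X);
            *c ^= gf_mul(kbyte, Y);
            *d ^= gf_mul(kbyte, Z);
    }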
491/* Macros to calculate the key-dependent S-boxes for a 128-bit key using
492 * the S vector from CALC_S. CALC_SB_2 computes a single entry in all
493 * four S-boxes, where i is the index of the entry to compute, and a and b
494 * are the index numbers preprocessed through the q0 and q1 tables
495 * respectively. */
496
497#define CALC_SB_2(i, a, b) \
498 ctx->s[0][i] = mds[0][q0[(a) ^ sa] ^ se]; \
499 ctx->s[1][i] = mds[1][q0[(b) ^ sb] ^ sf]; \
500 ctx->s[2][i] = mds[2][q1[(a) ^ sc] ^ sg]; \
501 ctx->s[3][i] = mds[3][q1[(b) ^ sd] ^ sh]
502
503/* Macro exactly like CALC_SB_2, but for 192-bit keys. */
504
505#define CALC_SB192_2(i, a, b) \
506 ctx->s[0][i] = mds[0][q0[q0[(b) ^ sa] ^ se] ^ si]; \
507 ctx->s[1][i] = mds[1][q0[q1[(b) ^ sb] ^ sf] ^ sj]; \
508 ctx->s[2][i] = mds[2][q1[q0[(a) ^ sc] ^ sg] ^ sk]; \
509 ctx->s[3][i] = mds[3][q1[q1[(a) ^ sd] ^ sh] ^ sl];
510
511/* Macro exactly like CALC_SB_2, but for 256-bit keys. */
512
513#define CALC_SB256_2(i, a, b) \
514 ctx->s[0][i] = mds[0][q0[q0[q1[(b) ^ sa] ^ se] ^ si] ^ sm]; \
515 ctx->s[1][i] = mds[1][q0[q1[q1[(a) ^ sb] ^ sf] ^ sj] ^ sn]; \
516 ctx->s[2][i] = mds[2][q1[q0[q0[(a) ^ sc] ^ sg] ^ sk] ^ so]; \
517 ctx->s[3][i] = mds[3][q1[q1[q0[(b) ^ sd] ^ sh] ^ sl] ^ sp];
518
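For orientation only: once these key-dependent S-boxes are filled in, the round function g() used by the G1/G2 macros over in twofish.c is again just four lookups and three XORs per 32-bit word. A rough sketch under that assumption (g_sketch is an invented name):

    static u32 g_sketch(const struct twofish_ctx *ctx, u32 x)
    {
            return ctx->s[0][x & 0xff] ^
                   ctx->s[1][(x >> 8) & 0xff] ^
                   ctx->s[2][(x >> 16) & 0xff] ^
                   ctx->s[3][x >> 24];
    }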
519/* Macros to calculate the whitening and round subkeys. CALC_K_2 computes the
520 * last two stages of the h() function for a given index (either 2i or 2i+1).
521 * a, b, c, and d are the four bytes going into the last two stages. For
522 * 128-bit keys, this is the entire h() function and a and c are the index
523 * preprocessed through q0 and q1 respectively; for longer keys they are the
524 * output of previous stages. j is the index of the first key byte to use.
525 * CALC_K computes a pair of subkeys for 128-bit Twofish, by calling CALC_K_2
526 * twice, doing the Pseudo-Hadamard Transform, and doing the necessary
527 * rotations. Its parameters are: a, the array to write the results into,
528 * j, the index of the first output entry, k and l, the preprocessed indices
529 * for index 2i, and m and n, the preprocessed indices for index 2i+1.
530 * CALC_K192_2 expands CALC_K_2 to handle 192-bit keys, by doing an
531 * additional lookup-and-XOR stage. The parameters a, b, c and d are the
532 * four bytes going into the last three stages. For 192-bit keys, c = d
533 * are the index preprocessed through q0, and a = b are the index
534 * preprocessed through q1; j is the index of the first key byte to use.
535 * CALC_K192 is identical to CALC_K but for using the CALC_K192_2 macro
536 * instead of CALC_K_2.
537 * CALC_K256_2 expands CALC_K192_2 to handle 256-bit keys, by doing an
538 * additional lookup-and-XOR stage. The parameters a and b are the index
539 * preprocessed through q0 and q1 respectively; j is the index of the first
540 * key byte to use. CALC_K256 is identical to CALC_K but for using the
541 * CALC_K256_2 macro instead of CALC_K_2. */
542
543#define CALC_K_2(a, b, c, d, j) \
544 mds[0][q0[a ^ key[(j) + 8]] ^ key[j]] \
545 ^ mds[1][q0[b ^ key[(j) + 9]] ^ key[(j) + 1]] \
546 ^ mds[2][q1[c ^ key[(j) + 10]] ^ key[(j) + 2]] \
547 ^ mds[3][q1[d ^ key[(j) + 11]] ^ key[(j) + 3]]
548
549#define CALC_K(a, j, k, l, m, n) \
550 x = CALC_K_2 (k, l, k, l, 0); \
551 y = CALC_K_2 (m, n, m, n, 4); \
552 y = rol32(y, 8); \
553 x += y; y += x; ctx->a[j] = x; \
554 ctx->a[(j) + 1] = rol32(y, 9)
555
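The tail end of CALC_K (and of the 192- and 256-bit variants below) is the Pseudo-Hadamard Transform plus the byte rotations from the Twofish key schedule. Spelled out as a plain helper, purely for illustration (pht_pair is a made-up name; rol32 is the kernel helper from linux/bitops.h included above):

    static void pht_pair(u32 x, u32 y, u32 *k_even, u32 *k_odd)
    {
            y = rol32(y, 8);        /* pre-rotate the odd-index h() output */
            x += y;                 /* PHT: A' = A + B  */
            y += x;                 /* PHT: B' = A + 2B */
            *k_even = x;            /* K[2i]   */
            *k_odd  = rol32(y, 9);  /* K[2i+1] */
    }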
556#define CALC_K192_2(a, b, c, d, j) \
557 CALC_K_2 (q0[a ^ key[(j) + 16]], \
558 q1[b ^ key[(j) + 17]], \
559 q0[c ^ key[(j) + 18]], \
560 q1[d ^ key[(j) + 19]], j)
561
562#define CALC_K192(a, j, k, l, m, n) \
563 x = CALC_K192_2 (l, l, k, k, 0); \
564 y = CALC_K192_2 (n, n, m, m, 4); \
565 y = rol32(y, 8); \
566 x += y; y += x; ctx->a[j] = x; \
567 ctx->a[(j) + 1] = rol32(y, 9)
568
569#define CALC_K256_2(a, b, j) \
570 CALC_K192_2 (q1[b ^ key[(j) + 24]], \
571 q1[a ^ key[(j) + 25]], \
572 q0[a ^ key[(j) + 26]], \
573 q0[b ^ key[(j) + 27]], j)
574
575#define CALC_K256(a, j, k, l, m, n) \
576 x = CALC_K256_2 (k, l, 0); \
577 y = CALC_K256_2 (m, n, 4); \
578 y = rol32(y, 8); \
579 x += y; y += x; ctx->a[j] = x; \
580 ctx->a[(j) + 1] = rol32(y, 9)
581
582/* Perform the key setup. */
583int twofish_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int key_len)
584{
585
586 struct twofish_ctx *ctx = crypto_tfm_ctx(tfm);
587 u32 *flags = &tfm->crt_flags;
588
589 int i, j, k;
590
591 /* Temporaries for CALC_K. */
592 u32 x, y;
593
594 /* The S vector used to key the S-boxes, split up into individual bytes.
595 * 128-bit keys use only sa through sh; 256-bit use all of them. */
596 u8 sa = 0, sb = 0, sc = 0, sd = 0, se = 0, sf = 0, sg = 0, sh = 0;
597 u8 si = 0, sj = 0, sk = 0, sl = 0, sm = 0, sn = 0, so = 0, sp = 0;
598
599 /* Temporary for CALC_S. */
600 u8 tmp;
601
602 /* Check key length. */
603 if (key_len % 8)
604 {
605 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
606 return -EINVAL; /* unsupported key length */
607 }
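This check is looser than the explicit 16/24/32-byte test in the old twofish.c above; it relies on the crypto API rejecting key lengths outside the cipher's advertised minimum and maximum (16 and 32 bytes for Twofish) before setkey is ever called, so only 16-, 24- and 32-byte keys should reach this point.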
608
609 /* Compute the first two words of the S vector. The magic numbers are
610 * the entries of the RS matrix, preprocessed through poly_to_exp. The
611 * numbers in the comments are the original (polynomial form) matrix
612 * entries. */
613 CALC_S (sa, sb, sc, sd, 0, 0x00, 0x2D, 0x01, 0x2D); /* 01 A4 02 A4 */
614 CALC_S (sa, sb, sc, sd, 1, 0x2D, 0xA4, 0x44, 0x8A); /* A4 56 A1 55 */
615 CALC_S (sa, sb, sc, sd, 2, 0x8A, 0xD5, 0xBF, 0xD1); /* 55 82 FC 87 */
616 CALC_S (sa, sb, sc, sd, 3, 0xD1, 0x7F, 0x3D, 0x99); /* 87 F3 C1 5A */
617 CALC_S (sa, sb, sc, sd, 4, 0x99, 0x46, 0x66, 0x96); /* 5A 1E 47 58 */
618 CALC_S (sa, sb, sc, sd, 5, 0x96, 0x3C, 0x5B, 0xED); /* 58 C6 AE DB */
619 CALC_S (sa, sb, sc, sd, 6, 0xED, 0x37, 0x4F, 0xE0); /* DB 68 3D 9E */
620 CALC_S (sa, sb, sc, sd, 7, 0xE0, 0xD0, 0x8C, 0x17); /* 9E E5 19 03 */
621 CALC_S (se, sf, sg, sh, 8, 0x00, 0x2D, 0x01, 0x2D); /* 01 A4 02 A4 */
622 CALC_S (se, sf, sg, sh, 9, 0x2D, 0xA4, 0x44, 0x8A); /* A4 56 A1 55 */
623 CALC_S (se, sf, sg, sh, 10, 0x8A, 0xD5, 0xBF, 0xD1); /* 55 82 FC 87 */
624 CALC_S (se, sf, sg, sh, 11, 0xD1, 0x7F, 0x3D, 0x99); /* 87 F3 C1 5A */
625 CALC_S (se, sf, sg, sh, 12, 0x99, 0x46, 0x66, 0x96); /* 5A 1E 47 58 */
626 CALC_S (se, sf, sg, sh, 13, 0x96, 0x3C, 0x5B, 0xED); /* 58 C6 AE DB */
627 CALC_S (se, sf, sg, sh, 14, 0xED, 0x37, 0x4F, 0xE0); /* DB 68 3D 9E */
628 CALC_S (se, sf, sg, sh, 15, 0xE0, 0xD0, 0x8C, 0x17); /* 9E E5 19 03 */
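        /* For reference, each group of eight CALC_S calls above evaluates one
         * 4-byte word of the S vector as an RS-matrix product over GF(2^8)
         * reduced by w(x) = x^8 + x^6 + x^3 + x^2 + 1 (0x14D):
         *
         *     [ sa ]   [ 01 A4 55 87 5A 58 DB 9E ]   [ key[0] ]
         *     [ sb ] = [ A4 56 82 F3 1E C6 68 E5 ] . [  ...   ]
         *     [ sc ]   [ 02 A1 FC C1 47 AE 3D 19 ]   [        ]
         *     [ sd ]   [ A4 55 87 5A 58 DB 9E 03 ]   [ key[7] ]
         *
         * Call i supplies column i of RS in log form, e.g. 0x00, 0x2D, 0x01,
         * 0x2D for the first column (01, A4, 02, A4) -- note how 01 maps to
         * 0x00 and 02 maps to 0x01 -- so each field multiplication becomes an
         * addition of exponents in the poly_to_exp representation mentioned
         * above. */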
629
630 if (key_len == 24 || key_len == 32) { /* 192- or 256-bit key */
631 /* Calculate the third word of the S vector */
632 CALC_S (si, sj, sk, sl, 16, 0x00, 0x2D, 0x01, 0x2D); /* 01 A4 02 A4 */
633 CALC_S (si, sj, sk, sl, 17, 0x2D, 0xA4, 0x44, 0x8A); /* A4 56 A1 55 */
634 CALC_S (si, sj, sk, sl, 18, 0x8A, 0xD5, 0xBF, 0xD1); /* 55 82 FC 87 */
635 CALC_S (si, sj, sk, sl, 19, 0xD1, 0x7F, 0x3D, 0x99); /* 87 F3 C1 5A */
636 CALC_S (si, sj, sk, sl, 20, 0x99, 0x46, 0x66, 0x96); /* 5A 1E 47 58 */
637 CALC_S (si, sj, sk, sl, 21, 0x96, 0x3C, 0x5B, 0xED); /* 58 C6 AE DB */
638 CALC_S (si, sj, sk, sl, 22, 0xED, 0x37, 0x4F, 0xE0); /* DB 68 3D 9E */
639 CALC_S (si, sj, sk, sl, 23, 0xE0, 0xD0, 0x8C, 0x17); /* 9E E5 19 03 */
640 }
641
642 if (key_len == 32) { /* 256-bit key */
643 /* Calculate the fourth word of the S vector */
644 CALC_S (sm, sn, so, sp, 24, 0x00, 0x2D, 0x01, 0x2D); /* 01 A4 02 A4 */
645 CALC_S (sm, sn, so, sp, 25, 0x2D, 0xA4, 0x44, 0x8A); /* A4 56 A1 55 */
646 CALC_S (sm, sn, so, sp, 26, 0x8A, 0xD5, 0xBF, 0xD1); /* 55 82 FC 87 */
647 CALC_S (sm, sn, so, sp, 27, 0xD1, 0x7F, 0x3D, 0x99); /* 87 F3 C1 5A */
648 CALC_S (sm, sn, so, sp, 28, 0x99, 0x46, 0x66, 0x96); /* 5A 1E 47 58 */
649 CALC_S (sm, sn, so, sp, 29, 0x96, 0x3C, 0x5B, 0xED); /* 58 C6 AE DB */
650 CALC_S (sm, sn, so, sp, 30, 0xED, 0x37, 0x4F, 0xE0); /* DB 68 3D 9E */
651 CALC_S (sm, sn, so, sp, 31, 0xE0, 0xD0, 0x8C, 0x17); /* 9E E5 19 03 */
652
653 /* Compute the S-boxes. */
654 for ( i = j = 0, k = 1; i < 256; i++, j += 2, k += 2 ) {
655 CALC_SB256_2( i, calc_sb_tbl[j], calc_sb_tbl[k] );
656 }
657
658 /* Calculate whitening and round subkeys. The constants are
659 * indices of subkeys, preprocessed through q0 and q1. */
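        /* For reference, these constant pairs are q0[i] and q1[i] for the
         * subkey index bytes i = 0..39: the first call uses 0xA9 = q0[0],
         * 0x75 = q1[0], 0x67 = q0[1] and 0xF3 = q1[1], so CALC_K256 (w, 0, ...)
         * yields whitening words K[0] and K[1].  Indices 0..7 fill w[] and
         * 8..39 fill k[].  The S-box loops presumably rely on the same
         * interleaving, which would explain why calc_sb_tbl also begins
         * A9, 75, 67, F3, ... */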
660 CALC_K256 (w, 0, 0xA9, 0x75, 0x67, 0xF3);
661 CALC_K256 (w, 2, 0xB3, 0xC6, 0xE8, 0xF4);
662 CALC_K256 (w, 4, 0x04, 0xDB, 0xFD, 0x7B);
663 CALC_K256 (w, 6, 0xA3, 0xFB, 0x76, 0xC8);
664 CALC_K256 (k, 0, 0x9A, 0x4A, 0x92, 0xD3);
665 CALC_K256 (k, 2, 0x80, 0xE6, 0x78, 0x6B);
666 CALC_K256 (k, 4, 0xE4, 0x45, 0xDD, 0x7D);
667 CALC_K256 (k, 6, 0xD1, 0xE8, 0x38, 0x4B);
668 CALC_K256 (k, 8, 0x0D, 0xD6, 0xC6, 0x32);
669 CALC_K256 (k, 10, 0x35, 0xD8, 0x98, 0xFD);
670 CALC_K256 (k, 12, 0x18, 0x37, 0xF7, 0x71);
671 CALC_K256 (k, 14, 0xEC, 0xF1, 0x6C, 0xE1);
672 CALC_K256 (k, 16, 0x43, 0x30, 0x75, 0x0F);
673 CALC_K256 (k, 18, 0x37, 0xF8, 0x26, 0x1B);
674 CALC_K256 (k, 20, 0xFA, 0x87, 0x13, 0xFA);
675 CALC_K256 (k, 22, 0x94, 0x06, 0x48, 0x3F);
676 CALC_K256 (k, 24, 0xF2, 0x5E, 0xD0, 0xBA);
677 CALC_K256 (k, 26, 0x8B, 0xAE, 0x30, 0x5B);
678 CALC_K256 (k, 28, 0x84, 0x8A, 0x54, 0x00);
679 CALC_K256 (k, 30, 0xDF, 0xBC, 0x23, 0x9D);
680 } else if (key_len == 24) { /* 192-bit key */
681 /* Compute the S-boxes. */
682 for ( i = j = 0, k = 1; i < 256; i++, j += 2, k += 2 ) {
683 CALC_SB192_2( i, calc_sb_tbl[j], calc_sb_tbl[k] );
684 }
685
686 /* Calculate whitening and round subkeys. The constants are
687 * indices of subkeys, preprocessed through q0 and q1. */
688 CALC_K192 (w, 0, 0xA9, 0x75, 0x67, 0xF3);
689 CALC_K192 (w, 2, 0xB3, 0xC6, 0xE8, 0xF4);
690 CALC_K192 (w, 4, 0x04, 0xDB, 0xFD, 0x7B);
691 CALC_K192 (w, 6, 0xA3, 0xFB, 0x76, 0xC8);
692 CALC_K192 (k, 0, 0x9A, 0x4A, 0x92, 0xD3);
693 CALC_K192 (k, 2, 0x80, 0xE6, 0x78, 0x6B);
694 CALC_K192 (k, 4, 0xE4, 0x45, 0xDD, 0x7D);
695 CALC_K192 (k, 6, 0xD1, 0xE8, 0x38, 0x4B);
696 CALC_K192 (k, 8, 0x0D, 0xD6, 0xC6, 0x32);
697 CALC_K192 (k, 10, 0x35, 0xD8, 0x98, 0xFD);
698 CALC_K192 (k, 12, 0x18, 0x37, 0xF7, 0x71);
699 CALC_K192 (k, 14, 0xEC, 0xF1, 0x6C, 0xE1);
700 CALC_K192 (k, 16, 0x43, 0x30, 0x75, 0x0F);
701 CALC_K192 (k, 18, 0x37, 0xF8, 0x26, 0x1B);
702 CALC_K192 (k, 20, 0xFA, 0x87, 0x13, 0xFA);
703 CALC_K192 (k, 22, 0x94, 0x06, 0x48, 0x3F);
704 CALC_K192 (k, 24, 0xF2, 0x5E, 0xD0, 0xBA);
705 CALC_K192 (k, 26, 0x8B, 0xAE, 0x30, 0x5B);
706 CALC_K192 (k, 28, 0x84, 0x8A, 0x54, 0x00);
707 CALC_K192 (k, 30, 0xDF, 0xBC, 0x23, 0x9D);
708 } else { /* 128-bit key */
709 /* Compute the S-boxes. */
710 for ( i = j = 0, k = 1; i < 256; i++, j += 2, k += 2 ) {
711 CALC_SB_2( i, calc_sb_tbl[j], calc_sb_tbl[k] );
712 }
713
714 /* Calculate whitening and round subkeys. The constants are
715 * indices of subkeys, preprocessed through q0 and q1. */
716 CALC_K (w, 0, 0xA9, 0x75, 0x67, 0xF3);
717 CALC_K (w, 2, 0xB3, 0xC6, 0xE8, 0xF4);
718 CALC_K (w, 4, 0x04, 0xDB, 0xFD, 0x7B);
719 CALC_K (w, 6, 0xA3, 0xFB, 0x76, 0xC8);
720 CALC_K (k, 0, 0x9A, 0x4A, 0x92, 0xD3);
721 CALC_K (k, 2, 0x80, 0xE6, 0x78, 0x6B);
722 CALC_K (k, 4, 0xE4, 0x45, 0xDD, 0x7D);
723 CALC_K (k, 6, 0xD1, 0xE8, 0x38, 0x4B);
724 CALC_K (k, 8, 0x0D, 0xD6, 0xC6, 0x32);
725 CALC_K (k, 10, 0x35, 0xD8, 0x98, 0xFD);
726 CALC_K (k, 12, 0x18, 0x37, 0xF7, 0x71);
727 CALC_K (k, 14, 0xEC, 0xF1, 0x6C, 0xE1);
728 CALC_K (k, 16, 0x43, 0x30, 0x75, 0x0F);
729 CALC_K (k, 18, 0x37, 0xF8, 0x26, 0x1B);
730 CALC_K (k, 20, 0xFA, 0x87, 0x13, 0xFA);
731 CALC_K (k, 22, 0x94, 0x06, 0x48, 0x3F);
732 CALC_K (k, 24, 0xF2, 0x5E, 0xD0, 0xBA);
733 CALC_K (k, 26, 0x8B, 0xAE, 0x30, 0x5B);
734 CALC_K (k, 28, 0x84, 0x8A, 0x54, 0x00);
735 CALC_K (k, 30, 0xDF, 0xBC, 0x23, 0x9D);
736 }
737
738 return 0;
739}
740
741EXPORT_SYMBOL_GPL(twofish_setkey);
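/* A minimal, hypothetical sketch of how a caller might reach this setkey
 * through the kernel crypto API of this series (it assumes the
 * crypto_alloc_cipher()/crypto_cipher_setkey() helpers introduced alongside
 * algapi, plus <linux/crypto.h> and <linux/err.h>; error paths trimmed):
 *
 *     struct crypto_cipher *tfm;
 *     static const u8 key[16];        (any 16-, 24- or 32-byte key)
 *
 *     tfm = crypto_alloc_cipher("twofish", 0, 0);
 *     if (IS_ERR(tfm))
 *             return PTR_ERR(tfm);
 *     if (crypto_cipher_setkey(tfm, key, sizeof(key)))
 *             printk(KERN_ERR "twofish: setkey failed\n");
 *     crypto_free_cipher(tfm);
 */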
742
743MODULE_LICENSE("GPL");
744MODULE_DESCRIPTION("Twofish cipher common functions");