author     Linus Torvalds <torvalds@linux-foundation.org>  2008-12-28 14:43:22 -0500
committer  Linus Torvalds <torvalds@linux-foundation.org>  2008-12-28 14:43:22 -0500
commit     e14e61e967f2b3bdf23f05e4ae5b9aa830151a44 (patch)
tree       9412c94cbe37bf6f0d0bd9ad2d8b907ce23eb1db
parent     cb10ea549fdc0ab2dd8988adab5bf40b4fa642f3 (diff)
parent     0ee4a96902dd7858e65f378c86f428a0355bd841 (diff)
Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (57 commits)
  crypto: aes - Precompute tables
  crypto: talitos - Ack done interrupt in isr instead of tasklet
  crypto: testmgr - Correct comment about deflate parameters
  crypto: salsa20 - Remove private wrappers around various operations
  crypto: des3_ede - permit weak keys unless REQ_WEAK_KEY set
  crypto: sha512 - Switch to shash
  crypto: sha512 - Move message schedule W[80] to static percpu area
  crypto: michael_mic - Switch to shash
  crypto: wp512 - Switch to shash
  crypto: tgr192 - Switch to shash
  crypto: sha256 - Switch to shash
  crypto: md5 - Switch to shash
  crypto: md4 - Switch to shash
  crypto: sha1 - Switch to shash
  crypto: rmd320 - Switch to shash
  crypto: rmd256 - Switch to shash
  crypto: rmd160 - Switch to shash
  crypto: rmd128 - Switch to shash
  crypto: null - Switch to shash
  crypto: hash - Make setkey optional
  ...
-rw-r--r--  arch/x86/crypto/crc32c-intel.c  121
-rw-r--r--  crypto/Kconfig  29
-rw-r--r--  crypto/Makefile  1
-rw-r--r--  crypto/aes_generic.c  1145
-rw-r--r--  crypto/ahash.c  38
-rw-r--r--  crypto/ansi_cprng.c  35
-rw-r--r--  crypto/api.c  121
-rw-r--r--  crypto/authenc.c  3
-rw-r--r--  crypto/camellia.c  84
-rw-r--r--  crypto/crc32c.c  290
-rw-r--r--  crypto/crypto_null.c  64
-rw-r--r--  crypto/des_generic.c  5
-rw-r--r--  crypto/fcrypt.c  8
-rw-r--r--  crypto/hmac.c  10
-rw-r--r--  crypto/internal.h  2
-rw-r--r--  crypto/md4.c  56
-rw-r--r--  crypto/md5.c  50
-rw-r--r--  crypto/michael_mic.c  72
-rw-r--r--  crypto/proc.c  20
-rw-r--r--  crypto/rmd128.c  61
-rw-r--r--  crypto/rmd160.c  61
-rw-r--r--  crypto/rmd256.c  61
-rw-r--r--  crypto/rmd320.c  61
-rw-r--r--  crypto/salsa20_generic.c  75
-rw-r--r--  crypto/sha1_generic.c  56
-rw-r--r--  crypto/sha256_generic.c  104
-rw-r--r--  crypto/sha512_generic.c  127
-rw-r--r--  crypto/shash.c  508
-rw-r--r--  crypto/testmgr.c  76
-rw-r--r--  crypto/testmgr.h  2
-rw-r--r--  crypto/tgr192.c  135
-rw-r--r--  crypto/wp512.c  121
-rw-r--r--  drivers/crypto/hifn_795x.c  494
-rw-r--r--  drivers/crypto/padlock-aes.c  52
-rw-r--r--  drivers/crypto/talitos.c  153
-rw-r--r--  drivers/crypto/talitos.h  85
-rw-r--r--  include/crypto/aes.h  8
-rw-r--r--  include/crypto/algapi.h  16
-rw-r--r--  include/crypto/hash.h  125
-rw-r--r--  include/crypto/internal/hash.h  16
-rw-r--r--  include/linux/crc32c.h  6
-rw-r--r--  include/linux/crypto.h  10
-rw-r--r--  lib/Kconfig  2
-rw-r--r--  lib/libcrc32c.c  182
44 files changed, 3331 insertions, 1420 deletions
diff --git a/arch/x86/crypto/crc32c-intel.c b/arch/x86/crypto/crc32c-intel.c
index 070afc5b6c94..b9d00261703c 100644
--- a/arch/x86/crypto/crc32c-intel.c
+++ b/arch/x86/crypto/crc32c-intel.c
@@ -6,13 +6,22 @@
  * Intel(R) 64 and IA-32 Architectures Software Developer's Manual
  * Volume 2A: Instruction Set Reference, A-M
  *
- * Copyright (c) 2008 Austin Zhang <austin_zhang@linux.intel.com>
- * Copyright (c) 2008 Kent Liu <kent.liu@intel.com>
+ * Copyright (C) 2008 Intel Corporation
+ * Authors: Austin Zhang <austin_zhang@linux.intel.com>
+ *          Kent Liu <kent.liu@intel.com>
  *
  * This program is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
- * any later version.
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License along with
+ * this program; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
  *
  */
 #include <linux/init.h>
@@ -75,99 +84,92 @@ static u32 __pure crc32c_intel_le_hw(u32 crc, unsigned char const *p, size_t len
  * If your algorithm starts with ~0, then XOR with ~0 before you set
  * the seed.
  */
-static int crc32c_intel_setkey(struct crypto_ahash *hash, const u8 *key,
+static int crc32c_intel_setkey(struct crypto_shash *hash, const u8 *key,
 			unsigned int keylen)
 {
-	u32 *mctx = crypto_ahash_ctx(hash);
+	u32 *mctx = crypto_shash_ctx(hash);
 
 	if (keylen != sizeof(u32)) {
-		crypto_ahash_set_flags(hash, CRYPTO_TFM_RES_BAD_KEY_LEN);
+		crypto_shash_set_flags(hash, CRYPTO_TFM_RES_BAD_KEY_LEN);
 		return -EINVAL;
 	}
 	*mctx = le32_to_cpup((__le32 *)key);
 	return 0;
 }
 
-static int crc32c_intel_init(struct ahash_request *req)
+static int crc32c_intel_init(struct shash_desc *desc)
 {
-	u32 *mctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
-	u32 *crcp = ahash_request_ctx(req);
+	u32 *mctx = crypto_shash_ctx(desc->tfm);
+	u32 *crcp = shash_desc_ctx(desc);
 
 	*crcp = *mctx;
 
 	return 0;
 }
 
-static int crc32c_intel_update(struct ahash_request *req)
+static int crc32c_intel_update(struct shash_desc *desc, const u8 *data,
+			       unsigned int len)
 {
-	struct crypto_hash_walk walk;
-	u32 *crcp = ahash_request_ctx(req);
-	u32 crc = *crcp;
-	int nbytes;
-
-	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes;
-	     nbytes = crypto_hash_walk_done(&walk, 0))
-		crc = crc32c_intel_le_hw(crc, walk.data, nbytes);
+	u32 *crcp = shash_desc_ctx(desc);
 
-	*crcp = crc;
+	*crcp = crc32c_intel_le_hw(*crcp, data, len);
 	return 0;
 }
 
-static int crc32c_intel_final(struct ahash_request *req)
+static int __crc32c_intel_finup(u32 *crcp, const u8 *data, unsigned int len,
+				u8 *out)
 {
-	u32 *crcp = ahash_request_ctx(req);
-
-	*(__le32 *)req->result = ~cpu_to_le32p(crcp);
+	*(__le32 *)out = ~cpu_to_le32(crc32c_intel_le_hw(*crcp, data, len));
 	return 0;
 }
 
-static int crc32c_intel_digest(struct ahash_request *req)
+static int crc32c_intel_finup(struct shash_desc *desc, const u8 *data,
+			      unsigned int len, u8 *out)
 {
-	struct crypto_hash_walk walk;
-	u32 *mctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
-	u32 crc = *mctx;
-	int nbytes;
+	return __crc32c_intel_finup(shash_desc_ctx(desc), data, len, out);
+}
 
-	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes;
-	     nbytes = crypto_hash_walk_done(&walk, 0))
-		crc = crc32c_intel_le_hw(crc, walk.data, nbytes);
+static int crc32c_intel_final(struct shash_desc *desc, u8 *out)
+{
+	u32 *crcp = shash_desc_ctx(desc);
 
-	*(__le32 *)req->result = ~cpu_to_le32(crc);
+	*(__le32 *)out = ~cpu_to_le32p(crcp);
 	return 0;
 }
 
+static int crc32c_intel_digest(struct shash_desc *desc, const u8 *data,
+			       unsigned int len, u8 *out)
+{
+	return __crc32c_intel_finup(crypto_shash_ctx(desc->tfm), data, len,
+				    out);
+}
+
 static int crc32c_intel_cra_init(struct crypto_tfm *tfm)
 {
 	u32 *key = crypto_tfm_ctx(tfm);
 
 	*key = ~0;
 
-	tfm->crt_ahash.reqsize = sizeof(u32);
-
 	return 0;
 }
 
-static struct crypto_alg alg = {
-	.cra_name = "crc32c",
-	.cra_driver_name = "crc32c-intel",
-	.cra_priority = 200,
-	.cra_flags = CRYPTO_ALG_TYPE_AHASH,
-	.cra_blocksize = CHKSUM_BLOCK_SIZE,
-	.cra_alignmask = 3,
-	.cra_ctxsize = sizeof(u32),
-	.cra_module = THIS_MODULE,
-	.cra_list = LIST_HEAD_INIT(alg.cra_list),
-	.cra_init = crc32c_intel_cra_init,
-	.cra_type = &crypto_ahash_type,
-	.cra_u = {
-		.ahash = {
-			.digestsize = CHKSUM_DIGEST_SIZE,
-			.setkey = crc32c_intel_setkey,
-			.init = crc32c_intel_init,
-			.update = crc32c_intel_update,
-			.final = crc32c_intel_final,
-			.digest = crc32c_intel_digest,
-		}
+static struct shash_alg alg = {
+	.setkey = crc32c_intel_setkey,
+	.init = crc32c_intel_init,
+	.update = crc32c_intel_update,
+	.final = crc32c_intel_final,
+	.finup = crc32c_intel_finup,
+	.digest = crc32c_intel_digest,
+	.descsize = sizeof(u32),
+	.digestsize = CHKSUM_DIGEST_SIZE,
+	.base = {
+		.cra_name = "crc32c",
+		.cra_driver_name = "crc32c-intel",
+		.cra_priority = 200,
+		.cra_blocksize = CHKSUM_BLOCK_SIZE,
+		.cra_ctxsize = sizeof(u32),
+		.cra_module = THIS_MODULE,
+		.cra_init = crc32c_intel_cra_init,
 	}
 };
 
@@ -175,14 +177,14 @@ static struct crypto_alg alg = {
 static int __init crc32c_intel_mod_init(void)
 {
 	if (cpu_has_xmm4_2)
-		return crypto_register_alg(&alg);
+		return crypto_register_shash(&alg);
 	else
 		return -ENODEV;
 }
 
 static void __exit crc32c_intel_mod_fini(void)
 {
-	crypto_unregister_alg(&alg);
+	crypto_unregister_shash(&alg);
 }
 
 module_init(crc32c_intel_mod_init);
@@ -194,4 +196,3 @@ MODULE_LICENSE("GPL");
 
 MODULE_ALIAS("crc32c");
 MODULE_ALIAS("crc32c-intel");
-
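
The crc32c-intel conversion above is representative of the "Switch to shash" patches in this merge: the driver now fills in a struct shash_alg and registers it with crypto_register_shash(). As a caller-side illustration only (not part of this merge), a minimal sketch of using the new synchronous hash interface follows. It assumes the crypto_alloc_shash(), crypto_shash_descsize(), crypto_shash_digest() and crypto_free_shash() helpers declared by the new include/crypto/hash.h; the example_crc32c() wrapper and its error handling are made up for the example.

/* Illustrative sketch, not part of this merge. */
#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/slab.h>

static int example_crc32c(const u8 *data, unsigned int len, __le32 *out)
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	int err;

	tfm = crypto_alloc_shash("crc32c", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* descsize comes from the algorithm (sizeof(u32) for crc32c-intel) */
	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc) {
		err = -ENOMEM;
		goto out_free_tfm;
	}
	desc->tfm = tfm;
	desc->flags = 0;

	/* one-shot digest: init + update + final in a single call */
	err = crypto_shash_digest(desc, data, len, (u8 *)out);

	kfree(desc);
out_free_tfm:
	crypto_free_shash(tfm);
	return err;
}
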
diff --git a/crypto/Kconfig b/crypto/Kconfig
index dc20a34ba5ef..8dde4fcf99c9 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -102,6 +102,7 @@ config CRYPTO_NULL
 	tristate "Null algorithms"
 	select CRYPTO_ALGAPI
 	select CRYPTO_BLKCIPHER
+	select CRYPTO_HASH
 	help
 	  These are 'Null' algorithms, used by IPsec, which do nothing.
 
@@ -256,12 +257,10 @@ comment "Digest"
 config CRYPTO_CRC32C
 	tristate "CRC32c CRC algorithm"
 	select CRYPTO_HASH
-	select LIBCRC32C
 	help
 	  Castagnoli, et al Cyclic Redundancy-Check Algorithm. Used
 	  by iSCSI for header and data digests and by others.
-	  See Castagnoli93. This implementation uses lib/libcrc32c.
-	  Module will be crc32c.
+	  See Castagnoli93. Module will be crc32c.
 
 config CRYPTO_CRC32C_INTEL
 	tristate "CRC32c INTEL hardware acceleration"
@@ -277,19 +276,19 @@ config CRYPTO_CRC32C_INTEL
 
 config CRYPTO_MD4
 	tristate "MD4 digest algorithm"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  MD4 message digest algorithm (RFC1320).
 
 config CRYPTO_MD5
 	tristate "MD5 digest algorithm"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  MD5 message digest algorithm (RFC1321).
 
 config CRYPTO_MICHAEL_MIC
 	tristate "Michael MIC keyed digest algorithm"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  Michael MIC is used for message integrity protection in TKIP
 	  (IEEE 802.11i). This algorithm is required for TKIP, but it
@@ -298,7 +297,7 @@ config CRYPTO_MICHAEL_MIC
 
 config CRYPTO_RMD128
 	tristate "RIPEMD-128 digest algorithm"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  RIPEMD-128 (ISO/IEC 10118-3:2004).
 
@@ -311,7 +310,7 @@ config CRYPTO_RMD128
 
 config CRYPTO_RMD160
 	tristate "RIPEMD-160 digest algorithm"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  RIPEMD-160 (ISO/IEC 10118-3:2004).
 
@@ -328,7 +327,7 @@ config CRYPTO_RMD160
 
 config CRYPTO_RMD256
 	tristate "RIPEMD-256 digest algorithm"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  RIPEMD-256 is an optional extension of RIPEMD-128 with a
 	  256 bit hash. It is intended for applications that require
@@ -340,7 +339,7 @@ config CRYPTO_RMD256
 
 config CRYPTO_RMD320
 	tristate "RIPEMD-320 digest algorithm"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  RIPEMD-320 is an optional extension of RIPEMD-160 with a
 	  320 bit hash. It is intended for applications that require
@@ -352,13 +351,13 @@ config CRYPTO_RMD320
 
 config CRYPTO_SHA1
 	tristate "SHA1 digest algorithm"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2).
 
 config CRYPTO_SHA256
 	tristate "SHA224 and SHA256 digest algorithm"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  SHA256 secure hash standard (DFIPS 180-2).
 
@@ -370,7 +369,7 @@ config CRYPTO_SHA256
 
 config CRYPTO_SHA512
 	tristate "SHA384 and SHA512 digest algorithms"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  SHA512 secure hash standard (DFIPS 180-2).
 
@@ -382,7 +381,7 @@ config CRYPTO_SHA512
 
 config CRYPTO_TGR192
 	tristate "Tiger digest algorithms"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  Tiger hash algorithm 192, 160 and 128-bit hashes
 
@@ -395,7 +394,7 @@ config CRYPTO_TGR192
 
 config CRYPTO_WP512
 	tristate "Whirlpool digest algorithms"
-	select CRYPTO_ALGAPI
+	select CRYPTO_HASH
 	help
 	  Whirlpool hash algorithm 512, 384 and 256-bit hashes
 
diff --git a/crypto/Makefile b/crypto/Makefile
index cd4a4ed078ff..46b08bf2035f 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -22,6 +22,7 @@ obj-$(CONFIG_CRYPTO_SEQIV) += seqiv.o
 
 crypto_hash-objs := hash.o
 crypto_hash-objs += ahash.o
+crypto_hash-objs += shash.o
 obj-$(CONFIG_CRYPTO_HASH2) += crypto_hash.o
 
 cryptomgr-objs := algboss.o testmgr.o
diff --git a/crypto/aes_generic.c b/crypto/aes_generic.c
index 136dc98d8a03..b8b66ec3883b 100644
--- a/crypto/aes_generic.c
+++ b/crypto/aes_generic.c
@@ -60,102 +60,1068 @@ static inline u8 byte(const u32 x, const unsigned n)
60 return x >> (n << 3); 60 return x >> (n << 3);
61} 61}
62 62
63static u8 pow_tab[256] __initdata; 63static const u32 rco_tab[10] = { 1, 2, 4, 8, 16, 32, 64, 128, 27, 54 };
64static u8 log_tab[256] __initdata; 64
65static u8 sbx_tab[256] __initdata; 65const u32 crypto_ft_tab[4][256] = {
66static u8 isb_tab[256] __initdata; 66 {
67static u32 rco_tab[10]; 67 0xa56363c6, 0x847c7cf8, 0x997777ee, 0x8d7b7bf6,
68 68 0x0df2f2ff, 0xbd6b6bd6, 0xb16f6fde, 0x54c5c591,
69u32 crypto_ft_tab[4][256]; 69 0x50303060, 0x03010102, 0xa96767ce, 0x7d2b2b56,
70u32 crypto_fl_tab[4][256]; 70 0x19fefee7, 0x62d7d7b5, 0xe6abab4d, 0x9a7676ec,
71u32 crypto_it_tab[4][256]; 71 0x45caca8f, 0x9d82821f, 0x40c9c989, 0x877d7dfa,
72u32 crypto_il_tab[4][256]; 72 0x15fafaef, 0xeb5959b2, 0xc947478e, 0x0bf0f0fb,
73 73 0xecadad41, 0x67d4d4b3, 0xfda2a25f, 0xeaafaf45,
74EXPORT_SYMBOL_GPL(crypto_ft_tab); 74 0xbf9c9c23, 0xf7a4a453, 0x967272e4, 0x5bc0c09b,
75EXPORT_SYMBOL_GPL(crypto_fl_tab); 75 0xc2b7b775, 0x1cfdfde1, 0xae93933d, 0x6a26264c,
76EXPORT_SYMBOL_GPL(crypto_it_tab); 76 0x5a36366c, 0x413f3f7e, 0x02f7f7f5, 0x4fcccc83,
77EXPORT_SYMBOL_GPL(crypto_il_tab); 77 0x5c343468, 0xf4a5a551, 0x34e5e5d1, 0x08f1f1f9,
78 78 0x937171e2, 0x73d8d8ab, 0x53313162, 0x3f15152a,
79static inline u8 __init f_mult(u8 a, u8 b) 79 0x0c040408, 0x52c7c795, 0x65232346, 0x5ec3c39d,
80{ 80 0x28181830, 0xa1969637, 0x0f05050a, 0xb59a9a2f,
81 u8 aa = log_tab[a], cc = aa + log_tab[b]; 81 0x0907070e, 0x36121224, 0x9b80801b, 0x3de2e2df,
82 82 0x26ebebcd, 0x6927274e, 0xcdb2b27f, 0x9f7575ea,
83 return pow_tab[cc + (cc < aa ? 1 : 0)]; 83 0x1b090912, 0x9e83831d, 0x742c2c58, 0x2e1a1a34,
84} 84 0x2d1b1b36, 0xb26e6edc, 0xee5a5ab4, 0xfba0a05b,
85 85 0xf65252a4, 0x4d3b3b76, 0x61d6d6b7, 0xceb3b37d,
86#define ff_mult(a, b) (a && b ? f_mult(a, b) : 0) 86 0x7b292952, 0x3ee3e3dd, 0x712f2f5e, 0x97848413,
87 87 0xf55353a6, 0x68d1d1b9, 0x00000000, 0x2cededc1,
88static void __init gen_tabs(void) 88 0x60202040, 0x1ffcfce3, 0xc8b1b179, 0xed5b5bb6,
89{ 89 0xbe6a6ad4, 0x46cbcb8d, 0xd9bebe67, 0x4b393972,
90 u32 i, t; 90 0xde4a4a94, 0xd44c4c98, 0xe85858b0, 0x4acfcf85,
91 u8 p, q; 91 0x6bd0d0bb, 0x2aefefc5, 0xe5aaaa4f, 0x16fbfbed,
92 92 0xc5434386, 0xd74d4d9a, 0x55333366, 0x94858511,
93 /* 93 0xcf45458a, 0x10f9f9e9, 0x06020204, 0x817f7ffe,
94 * log and power tables for GF(2**8) finite field with 94 0xf05050a0, 0x443c3c78, 0xba9f9f25, 0xe3a8a84b,
95 * 0x011b as modular polynomial - the simplest primitive 95 0xf35151a2, 0xfea3a35d, 0xc0404080, 0x8a8f8f05,
96 * root is 0x03, used here to generate the tables 96 0xad92923f, 0xbc9d9d21, 0x48383870, 0x04f5f5f1,
97 */ 97 0xdfbcbc63, 0xc1b6b677, 0x75dadaaf, 0x63212142,
98 98 0x30101020, 0x1affffe5, 0x0ef3f3fd, 0x6dd2d2bf,
99 for (i = 0, p = 1; i < 256; ++i) { 99 0x4ccdcd81, 0x140c0c18, 0x35131326, 0x2fececc3,
100 pow_tab[i] = (u8) p; 100 0xe15f5fbe, 0xa2979735, 0xcc444488, 0x3917172e,
101 log_tab[p] = (u8) i; 101 0x57c4c493, 0xf2a7a755, 0x827e7efc, 0x473d3d7a,
102 102 0xac6464c8, 0xe75d5dba, 0x2b191932, 0x957373e6,
103 p ^= (p << 1) ^ (p & 0x80 ? 0x01b : 0); 103 0xa06060c0, 0x98818119, 0xd14f4f9e, 0x7fdcdca3,
104 0x66222244, 0x7e2a2a54, 0xab90903b, 0x8388880b,
105 0xca46468c, 0x29eeeec7, 0xd3b8b86b, 0x3c141428,
106 0x79dedea7, 0xe25e5ebc, 0x1d0b0b16, 0x76dbdbad,
107 0x3be0e0db, 0x56323264, 0x4e3a3a74, 0x1e0a0a14,
108 0xdb494992, 0x0a06060c, 0x6c242448, 0xe45c5cb8,
109 0x5dc2c29f, 0x6ed3d3bd, 0xefacac43, 0xa66262c4,
110 0xa8919139, 0xa4959531, 0x37e4e4d3, 0x8b7979f2,
111 0x32e7e7d5, 0x43c8c88b, 0x5937376e, 0xb76d6dda,
112 0x8c8d8d01, 0x64d5d5b1, 0xd24e4e9c, 0xe0a9a949,
113 0xb46c6cd8, 0xfa5656ac, 0x07f4f4f3, 0x25eaeacf,
114 0xaf6565ca, 0x8e7a7af4, 0xe9aeae47, 0x18080810,
115 0xd5baba6f, 0x887878f0, 0x6f25254a, 0x722e2e5c,
116 0x241c1c38, 0xf1a6a657, 0xc7b4b473, 0x51c6c697,
117 0x23e8e8cb, 0x7cdddda1, 0x9c7474e8, 0x211f1f3e,
118 0xdd4b4b96, 0xdcbdbd61, 0x868b8b0d, 0x858a8a0f,
119 0x907070e0, 0x423e3e7c, 0xc4b5b571, 0xaa6666cc,
120 0xd8484890, 0x05030306, 0x01f6f6f7, 0x120e0e1c,
121 0xa36161c2, 0x5f35356a, 0xf95757ae, 0xd0b9b969,
122 0x91868617, 0x58c1c199, 0x271d1d3a, 0xb99e9e27,
123 0x38e1e1d9, 0x13f8f8eb, 0xb398982b, 0x33111122,
124 0xbb6969d2, 0x70d9d9a9, 0x898e8e07, 0xa7949433,
125 0xb69b9b2d, 0x221e1e3c, 0x92878715, 0x20e9e9c9,
126 0x49cece87, 0xff5555aa, 0x78282850, 0x7adfdfa5,
127 0x8f8c8c03, 0xf8a1a159, 0x80898909, 0x170d0d1a,
128 0xdabfbf65, 0x31e6e6d7, 0xc6424284, 0xb86868d0,
129 0xc3414182, 0xb0999929, 0x772d2d5a, 0x110f0f1e,
130 0xcbb0b07b, 0xfc5454a8, 0xd6bbbb6d, 0x3a16162c,
131 }, {
132 0x6363c6a5, 0x7c7cf884, 0x7777ee99, 0x7b7bf68d,
133 0xf2f2ff0d, 0x6b6bd6bd, 0x6f6fdeb1, 0xc5c59154,
134 0x30306050, 0x01010203, 0x6767cea9, 0x2b2b567d,
135 0xfefee719, 0xd7d7b562, 0xabab4de6, 0x7676ec9a,
136 0xcaca8f45, 0x82821f9d, 0xc9c98940, 0x7d7dfa87,
137 0xfafaef15, 0x5959b2eb, 0x47478ec9, 0xf0f0fb0b,
138 0xadad41ec, 0xd4d4b367, 0xa2a25ffd, 0xafaf45ea,
139 0x9c9c23bf, 0xa4a453f7, 0x7272e496, 0xc0c09b5b,
140 0xb7b775c2, 0xfdfde11c, 0x93933dae, 0x26264c6a,
141 0x36366c5a, 0x3f3f7e41, 0xf7f7f502, 0xcccc834f,
142 0x3434685c, 0xa5a551f4, 0xe5e5d134, 0xf1f1f908,
143 0x7171e293, 0xd8d8ab73, 0x31316253, 0x15152a3f,
144 0x0404080c, 0xc7c79552, 0x23234665, 0xc3c39d5e,
145 0x18183028, 0x969637a1, 0x05050a0f, 0x9a9a2fb5,
146 0x07070e09, 0x12122436, 0x80801b9b, 0xe2e2df3d,
147 0xebebcd26, 0x27274e69, 0xb2b27fcd, 0x7575ea9f,
148 0x0909121b, 0x83831d9e, 0x2c2c5874, 0x1a1a342e,
149 0x1b1b362d, 0x6e6edcb2, 0x5a5ab4ee, 0xa0a05bfb,
150 0x5252a4f6, 0x3b3b764d, 0xd6d6b761, 0xb3b37dce,
151 0x2929527b, 0xe3e3dd3e, 0x2f2f5e71, 0x84841397,
152 0x5353a6f5, 0xd1d1b968, 0x00000000, 0xededc12c,
153 0x20204060, 0xfcfce31f, 0xb1b179c8, 0x5b5bb6ed,
154 0x6a6ad4be, 0xcbcb8d46, 0xbebe67d9, 0x3939724b,
155 0x4a4a94de, 0x4c4c98d4, 0x5858b0e8, 0xcfcf854a,
156 0xd0d0bb6b, 0xefefc52a, 0xaaaa4fe5, 0xfbfbed16,
157 0x434386c5, 0x4d4d9ad7, 0x33336655, 0x85851194,
158 0x45458acf, 0xf9f9e910, 0x02020406, 0x7f7ffe81,
159 0x5050a0f0, 0x3c3c7844, 0x9f9f25ba, 0xa8a84be3,
160 0x5151a2f3, 0xa3a35dfe, 0x404080c0, 0x8f8f058a,
161 0x92923fad, 0x9d9d21bc, 0x38387048, 0xf5f5f104,
162 0xbcbc63df, 0xb6b677c1, 0xdadaaf75, 0x21214263,
163 0x10102030, 0xffffe51a, 0xf3f3fd0e, 0xd2d2bf6d,
164 0xcdcd814c, 0x0c0c1814, 0x13132635, 0xececc32f,
165 0x5f5fbee1, 0x979735a2, 0x444488cc, 0x17172e39,
166 0xc4c49357, 0xa7a755f2, 0x7e7efc82, 0x3d3d7a47,
167 0x6464c8ac, 0x5d5dbae7, 0x1919322b, 0x7373e695,
168 0x6060c0a0, 0x81811998, 0x4f4f9ed1, 0xdcdca37f,
169 0x22224466, 0x2a2a547e, 0x90903bab, 0x88880b83,
170 0x46468cca, 0xeeeec729, 0xb8b86bd3, 0x1414283c,
171 0xdedea779, 0x5e5ebce2, 0x0b0b161d, 0xdbdbad76,
172 0xe0e0db3b, 0x32326456, 0x3a3a744e, 0x0a0a141e,
173 0x494992db, 0x06060c0a, 0x2424486c, 0x5c5cb8e4,
174 0xc2c29f5d, 0xd3d3bd6e, 0xacac43ef, 0x6262c4a6,
175 0x919139a8, 0x959531a4, 0xe4e4d337, 0x7979f28b,
176 0xe7e7d532, 0xc8c88b43, 0x37376e59, 0x6d6ddab7,
177 0x8d8d018c, 0xd5d5b164, 0x4e4e9cd2, 0xa9a949e0,
178 0x6c6cd8b4, 0x5656acfa, 0xf4f4f307, 0xeaeacf25,
179 0x6565caaf, 0x7a7af48e, 0xaeae47e9, 0x08081018,
180 0xbaba6fd5, 0x7878f088, 0x25254a6f, 0x2e2e5c72,
181 0x1c1c3824, 0xa6a657f1, 0xb4b473c7, 0xc6c69751,
182 0xe8e8cb23, 0xdddda17c, 0x7474e89c, 0x1f1f3e21,
183 0x4b4b96dd, 0xbdbd61dc, 0x8b8b0d86, 0x8a8a0f85,
184 0x7070e090, 0x3e3e7c42, 0xb5b571c4, 0x6666ccaa,
185 0x484890d8, 0x03030605, 0xf6f6f701, 0x0e0e1c12,
186 0x6161c2a3, 0x35356a5f, 0x5757aef9, 0xb9b969d0,
187 0x86861791, 0xc1c19958, 0x1d1d3a27, 0x9e9e27b9,
188 0xe1e1d938, 0xf8f8eb13, 0x98982bb3, 0x11112233,
189 0x6969d2bb, 0xd9d9a970, 0x8e8e0789, 0x949433a7,
190 0x9b9b2db6, 0x1e1e3c22, 0x87871592, 0xe9e9c920,
191 0xcece8749, 0x5555aaff, 0x28285078, 0xdfdfa57a,
192 0x8c8c038f, 0xa1a159f8, 0x89890980, 0x0d0d1a17,
193 0xbfbf65da, 0xe6e6d731, 0x424284c6, 0x6868d0b8,
194 0x414182c3, 0x999929b0, 0x2d2d5a77, 0x0f0f1e11,
195 0xb0b07bcb, 0x5454a8fc, 0xbbbb6dd6, 0x16162c3a,
196 }, {
197 0x63c6a563, 0x7cf8847c, 0x77ee9977, 0x7bf68d7b,
198 0xf2ff0df2, 0x6bd6bd6b, 0x6fdeb16f, 0xc59154c5,
199 0x30605030, 0x01020301, 0x67cea967, 0x2b567d2b,
200 0xfee719fe, 0xd7b562d7, 0xab4de6ab, 0x76ec9a76,
201 0xca8f45ca, 0x821f9d82, 0xc98940c9, 0x7dfa877d,
202 0xfaef15fa, 0x59b2eb59, 0x478ec947, 0xf0fb0bf0,
203 0xad41ecad, 0xd4b367d4, 0xa25ffda2, 0xaf45eaaf,
204 0x9c23bf9c, 0xa453f7a4, 0x72e49672, 0xc09b5bc0,
205 0xb775c2b7, 0xfde11cfd, 0x933dae93, 0x264c6a26,
206 0x366c5a36, 0x3f7e413f, 0xf7f502f7, 0xcc834fcc,
207 0x34685c34, 0xa551f4a5, 0xe5d134e5, 0xf1f908f1,
208 0x71e29371, 0xd8ab73d8, 0x31625331, 0x152a3f15,
209 0x04080c04, 0xc79552c7, 0x23466523, 0xc39d5ec3,
210 0x18302818, 0x9637a196, 0x050a0f05, 0x9a2fb59a,
211 0x070e0907, 0x12243612, 0x801b9b80, 0xe2df3de2,
212 0xebcd26eb, 0x274e6927, 0xb27fcdb2, 0x75ea9f75,
213 0x09121b09, 0x831d9e83, 0x2c58742c, 0x1a342e1a,
214 0x1b362d1b, 0x6edcb26e, 0x5ab4ee5a, 0xa05bfba0,
215 0x52a4f652, 0x3b764d3b, 0xd6b761d6, 0xb37dceb3,
216 0x29527b29, 0xe3dd3ee3, 0x2f5e712f, 0x84139784,
217 0x53a6f553, 0xd1b968d1, 0x00000000, 0xedc12ced,
218 0x20406020, 0xfce31ffc, 0xb179c8b1, 0x5bb6ed5b,
219 0x6ad4be6a, 0xcb8d46cb, 0xbe67d9be, 0x39724b39,
220 0x4a94de4a, 0x4c98d44c, 0x58b0e858, 0xcf854acf,
221 0xd0bb6bd0, 0xefc52aef, 0xaa4fe5aa, 0xfbed16fb,
222 0x4386c543, 0x4d9ad74d, 0x33665533, 0x85119485,
223 0x458acf45, 0xf9e910f9, 0x02040602, 0x7ffe817f,
224 0x50a0f050, 0x3c78443c, 0x9f25ba9f, 0xa84be3a8,
225 0x51a2f351, 0xa35dfea3, 0x4080c040, 0x8f058a8f,
226 0x923fad92, 0x9d21bc9d, 0x38704838, 0xf5f104f5,
227 0xbc63dfbc, 0xb677c1b6, 0xdaaf75da, 0x21426321,
228 0x10203010, 0xffe51aff, 0xf3fd0ef3, 0xd2bf6dd2,
229 0xcd814ccd, 0x0c18140c, 0x13263513, 0xecc32fec,
230 0x5fbee15f, 0x9735a297, 0x4488cc44, 0x172e3917,
231 0xc49357c4, 0xa755f2a7, 0x7efc827e, 0x3d7a473d,
232 0x64c8ac64, 0x5dbae75d, 0x19322b19, 0x73e69573,
233 0x60c0a060, 0x81199881, 0x4f9ed14f, 0xdca37fdc,
234 0x22446622, 0x2a547e2a, 0x903bab90, 0x880b8388,
235 0x468cca46, 0xeec729ee, 0xb86bd3b8, 0x14283c14,
236 0xdea779de, 0x5ebce25e, 0x0b161d0b, 0xdbad76db,
237 0xe0db3be0, 0x32645632, 0x3a744e3a, 0x0a141e0a,
238 0x4992db49, 0x060c0a06, 0x24486c24, 0x5cb8e45c,
239 0xc29f5dc2, 0xd3bd6ed3, 0xac43efac, 0x62c4a662,
240 0x9139a891, 0x9531a495, 0xe4d337e4, 0x79f28b79,
241 0xe7d532e7, 0xc88b43c8, 0x376e5937, 0x6ddab76d,
242 0x8d018c8d, 0xd5b164d5, 0x4e9cd24e, 0xa949e0a9,
243 0x6cd8b46c, 0x56acfa56, 0xf4f307f4, 0xeacf25ea,
244 0x65caaf65, 0x7af48e7a, 0xae47e9ae, 0x08101808,
245 0xba6fd5ba, 0x78f08878, 0x254a6f25, 0x2e5c722e,
246 0x1c38241c, 0xa657f1a6, 0xb473c7b4, 0xc69751c6,
247 0xe8cb23e8, 0xdda17cdd, 0x74e89c74, 0x1f3e211f,
248 0x4b96dd4b, 0xbd61dcbd, 0x8b0d868b, 0x8a0f858a,
249 0x70e09070, 0x3e7c423e, 0xb571c4b5, 0x66ccaa66,
250 0x4890d848, 0x03060503, 0xf6f701f6, 0x0e1c120e,
251 0x61c2a361, 0x356a5f35, 0x57aef957, 0xb969d0b9,
252 0x86179186, 0xc19958c1, 0x1d3a271d, 0x9e27b99e,
253 0xe1d938e1, 0xf8eb13f8, 0x982bb398, 0x11223311,
254 0x69d2bb69, 0xd9a970d9, 0x8e07898e, 0x9433a794,
255 0x9b2db69b, 0x1e3c221e, 0x87159287, 0xe9c920e9,
256 0xce8749ce, 0x55aaff55, 0x28507828, 0xdfa57adf,
257 0x8c038f8c, 0xa159f8a1, 0x89098089, 0x0d1a170d,
258 0xbf65dabf, 0xe6d731e6, 0x4284c642, 0x68d0b868,
259 0x4182c341, 0x9929b099, 0x2d5a772d, 0x0f1e110f,
260 0xb07bcbb0, 0x54a8fc54, 0xbb6dd6bb, 0x162c3a16,
261 }, {
262 0xc6a56363, 0xf8847c7c, 0xee997777, 0xf68d7b7b,
263 0xff0df2f2, 0xd6bd6b6b, 0xdeb16f6f, 0x9154c5c5,
264 0x60503030, 0x02030101, 0xcea96767, 0x567d2b2b,
265 0xe719fefe, 0xb562d7d7, 0x4de6abab, 0xec9a7676,
266 0x8f45caca, 0x1f9d8282, 0x8940c9c9, 0xfa877d7d,
267 0xef15fafa, 0xb2eb5959, 0x8ec94747, 0xfb0bf0f0,
268 0x41ecadad, 0xb367d4d4, 0x5ffda2a2, 0x45eaafaf,
269 0x23bf9c9c, 0x53f7a4a4, 0xe4967272, 0x9b5bc0c0,
270 0x75c2b7b7, 0xe11cfdfd, 0x3dae9393, 0x4c6a2626,
271 0x6c5a3636, 0x7e413f3f, 0xf502f7f7, 0x834fcccc,
272 0x685c3434, 0x51f4a5a5, 0xd134e5e5, 0xf908f1f1,
273 0xe2937171, 0xab73d8d8, 0x62533131, 0x2a3f1515,
274 0x080c0404, 0x9552c7c7, 0x46652323, 0x9d5ec3c3,
275 0x30281818, 0x37a19696, 0x0a0f0505, 0x2fb59a9a,
276 0x0e090707, 0x24361212, 0x1b9b8080, 0xdf3de2e2,
277 0xcd26ebeb, 0x4e692727, 0x7fcdb2b2, 0xea9f7575,
278 0x121b0909, 0x1d9e8383, 0x58742c2c, 0x342e1a1a,
279 0x362d1b1b, 0xdcb26e6e, 0xb4ee5a5a, 0x5bfba0a0,
280 0xa4f65252, 0x764d3b3b, 0xb761d6d6, 0x7dceb3b3,
281 0x527b2929, 0xdd3ee3e3, 0x5e712f2f, 0x13978484,
282 0xa6f55353, 0xb968d1d1, 0x00000000, 0xc12ceded,
283 0x40602020, 0xe31ffcfc, 0x79c8b1b1, 0xb6ed5b5b,
284 0xd4be6a6a, 0x8d46cbcb, 0x67d9bebe, 0x724b3939,
285 0x94de4a4a, 0x98d44c4c, 0xb0e85858, 0x854acfcf,
286 0xbb6bd0d0, 0xc52aefef, 0x4fe5aaaa, 0xed16fbfb,
287 0x86c54343, 0x9ad74d4d, 0x66553333, 0x11948585,
288 0x8acf4545, 0xe910f9f9, 0x04060202, 0xfe817f7f,
289 0xa0f05050, 0x78443c3c, 0x25ba9f9f, 0x4be3a8a8,
290 0xa2f35151, 0x5dfea3a3, 0x80c04040, 0x058a8f8f,
291 0x3fad9292, 0x21bc9d9d, 0x70483838, 0xf104f5f5,
292 0x63dfbcbc, 0x77c1b6b6, 0xaf75dada, 0x42632121,
293 0x20301010, 0xe51affff, 0xfd0ef3f3, 0xbf6dd2d2,
294 0x814ccdcd, 0x18140c0c, 0x26351313, 0xc32fecec,
295 0xbee15f5f, 0x35a29797, 0x88cc4444, 0x2e391717,
296 0x9357c4c4, 0x55f2a7a7, 0xfc827e7e, 0x7a473d3d,
297 0xc8ac6464, 0xbae75d5d, 0x322b1919, 0xe6957373,
298 0xc0a06060, 0x19988181, 0x9ed14f4f, 0xa37fdcdc,
299 0x44662222, 0x547e2a2a, 0x3bab9090, 0x0b838888,
300 0x8cca4646, 0xc729eeee, 0x6bd3b8b8, 0x283c1414,
301 0xa779dede, 0xbce25e5e, 0x161d0b0b, 0xad76dbdb,
302 0xdb3be0e0, 0x64563232, 0x744e3a3a, 0x141e0a0a,
303 0x92db4949, 0x0c0a0606, 0x486c2424, 0xb8e45c5c,
304 0x9f5dc2c2, 0xbd6ed3d3, 0x43efacac, 0xc4a66262,
305 0x39a89191, 0x31a49595, 0xd337e4e4, 0xf28b7979,
306 0xd532e7e7, 0x8b43c8c8, 0x6e593737, 0xdab76d6d,
307 0x018c8d8d, 0xb164d5d5, 0x9cd24e4e, 0x49e0a9a9,
308 0xd8b46c6c, 0xacfa5656, 0xf307f4f4, 0xcf25eaea,
309 0xcaaf6565, 0xf48e7a7a, 0x47e9aeae, 0x10180808,
310 0x6fd5baba, 0xf0887878, 0x4a6f2525, 0x5c722e2e,
311 0x38241c1c, 0x57f1a6a6, 0x73c7b4b4, 0x9751c6c6,
312 0xcb23e8e8, 0xa17cdddd, 0xe89c7474, 0x3e211f1f,
313 0x96dd4b4b, 0x61dcbdbd, 0x0d868b8b, 0x0f858a8a,
314 0xe0907070, 0x7c423e3e, 0x71c4b5b5, 0xccaa6666,
315 0x90d84848, 0x06050303, 0xf701f6f6, 0x1c120e0e,
316 0xc2a36161, 0x6a5f3535, 0xaef95757, 0x69d0b9b9,
317 0x17918686, 0x9958c1c1, 0x3a271d1d, 0x27b99e9e,
318 0xd938e1e1, 0xeb13f8f8, 0x2bb39898, 0x22331111,
319 0xd2bb6969, 0xa970d9d9, 0x07898e8e, 0x33a79494,
320 0x2db69b9b, 0x3c221e1e, 0x15928787, 0xc920e9e9,
321 0x8749cece, 0xaaff5555, 0x50782828, 0xa57adfdf,
322 0x038f8c8c, 0x59f8a1a1, 0x09808989, 0x1a170d0d,
323 0x65dabfbf, 0xd731e6e6, 0x84c64242, 0xd0b86868,
324 0x82c34141, 0x29b09999, 0x5a772d2d, 0x1e110f0f,
325 0x7bcbb0b0, 0xa8fc5454, 0x6dd6bbbb, 0x2c3a1616,
104 } 326 }
327};
105 328
106 log_tab[1] = 0; 329const u32 crypto_fl_tab[4][256] = {
107 330 {
108 for (i = 0, p = 1; i < 10; ++i) { 331 0x00000063, 0x0000007c, 0x00000077, 0x0000007b,
109 rco_tab[i] = p; 332 0x000000f2, 0x0000006b, 0x0000006f, 0x000000c5,
110 333 0x00000030, 0x00000001, 0x00000067, 0x0000002b,
111 p = (p << 1) ^ (p & 0x80 ? 0x01b : 0); 334 0x000000fe, 0x000000d7, 0x000000ab, 0x00000076,
335 0x000000ca, 0x00000082, 0x000000c9, 0x0000007d,
336 0x000000fa, 0x00000059, 0x00000047, 0x000000f0,
337 0x000000ad, 0x000000d4, 0x000000a2, 0x000000af,
338 0x0000009c, 0x000000a4, 0x00000072, 0x000000c0,
339 0x000000b7, 0x000000fd, 0x00000093, 0x00000026,
340 0x00000036, 0x0000003f, 0x000000f7, 0x000000cc,
341 0x00000034, 0x000000a5, 0x000000e5, 0x000000f1,
342 0x00000071, 0x000000d8, 0x00000031, 0x00000015,
343 0x00000004, 0x000000c7, 0x00000023, 0x000000c3,
344 0x00000018, 0x00000096, 0x00000005, 0x0000009a,
345 0x00000007, 0x00000012, 0x00000080, 0x000000e2,
346 0x000000eb, 0x00000027, 0x000000b2, 0x00000075,
347 0x00000009, 0x00000083, 0x0000002c, 0x0000001a,
348 0x0000001b, 0x0000006e, 0x0000005a, 0x000000a0,
349 0x00000052, 0x0000003b, 0x000000d6, 0x000000b3,
350 0x00000029, 0x000000e3, 0x0000002f, 0x00000084,
351 0x00000053, 0x000000d1, 0x00000000, 0x000000ed,
352 0x00000020, 0x000000fc, 0x000000b1, 0x0000005b,
353 0x0000006a, 0x000000cb, 0x000000be, 0x00000039,
354 0x0000004a, 0x0000004c, 0x00000058, 0x000000cf,
355 0x000000d0, 0x000000ef, 0x000000aa, 0x000000fb,
356 0x00000043, 0x0000004d, 0x00000033, 0x00000085,
357 0x00000045, 0x000000f9, 0x00000002, 0x0000007f,
358 0x00000050, 0x0000003c, 0x0000009f, 0x000000a8,
359 0x00000051, 0x000000a3, 0x00000040, 0x0000008f,
360 0x00000092, 0x0000009d, 0x00000038, 0x000000f5,
361 0x000000bc, 0x000000b6, 0x000000da, 0x00000021,
362 0x00000010, 0x000000ff, 0x000000f3, 0x000000d2,
363 0x000000cd, 0x0000000c, 0x00000013, 0x000000ec,
364 0x0000005f, 0x00000097, 0x00000044, 0x00000017,
365 0x000000c4, 0x000000a7, 0x0000007e, 0x0000003d,
366 0x00000064, 0x0000005d, 0x00000019, 0x00000073,
367 0x00000060, 0x00000081, 0x0000004f, 0x000000dc,
368 0x00000022, 0x0000002a, 0x00000090, 0x00000088,
369 0x00000046, 0x000000ee, 0x000000b8, 0x00000014,
370 0x000000de, 0x0000005e, 0x0000000b, 0x000000db,
371 0x000000e0, 0x00000032, 0x0000003a, 0x0000000a,
372 0x00000049, 0x00000006, 0x00000024, 0x0000005c,
373 0x000000c2, 0x000000d3, 0x000000ac, 0x00000062,
374 0x00000091, 0x00000095, 0x000000e4, 0x00000079,
375 0x000000e7, 0x000000c8, 0x00000037, 0x0000006d,
376 0x0000008d, 0x000000d5, 0x0000004e, 0x000000a9,
377 0x0000006c, 0x00000056, 0x000000f4, 0x000000ea,
378 0x00000065, 0x0000007a, 0x000000ae, 0x00000008,
379 0x000000ba, 0x00000078, 0x00000025, 0x0000002e,
380 0x0000001c, 0x000000a6, 0x000000b4, 0x000000c6,
381 0x000000e8, 0x000000dd, 0x00000074, 0x0000001f,
382 0x0000004b, 0x000000bd, 0x0000008b, 0x0000008a,
383 0x00000070, 0x0000003e, 0x000000b5, 0x00000066,
384 0x00000048, 0x00000003, 0x000000f6, 0x0000000e,
385 0x00000061, 0x00000035, 0x00000057, 0x000000b9,
386 0x00000086, 0x000000c1, 0x0000001d, 0x0000009e,
387 0x000000e1, 0x000000f8, 0x00000098, 0x00000011,
388 0x00000069, 0x000000d9, 0x0000008e, 0x00000094,
389 0x0000009b, 0x0000001e, 0x00000087, 0x000000e9,
390 0x000000ce, 0x00000055, 0x00000028, 0x000000df,
391 0x0000008c, 0x000000a1, 0x00000089, 0x0000000d,
392 0x000000bf, 0x000000e6, 0x00000042, 0x00000068,
393 0x00000041, 0x00000099, 0x0000002d, 0x0000000f,
394 0x000000b0, 0x00000054, 0x000000bb, 0x00000016,
395 }, {
396 0x00006300, 0x00007c00, 0x00007700, 0x00007b00,
397 0x0000f200, 0x00006b00, 0x00006f00, 0x0000c500,
398 0x00003000, 0x00000100, 0x00006700, 0x00002b00,
399 0x0000fe00, 0x0000d700, 0x0000ab00, 0x00007600,
400 0x0000ca00, 0x00008200, 0x0000c900, 0x00007d00,
401 0x0000fa00, 0x00005900, 0x00004700, 0x0000f000,
402 0x0000ad00, 0x0000d400, 0x0000a200, 0x0000af00,
403 0x00009c00, 0x0000a400, 0x00007200, 0x0000c000,
404 0x0000b700, 0x0000fd00, 0x00009300, 0x00002600,
405 0x00003600, 0x00003f00, 0x0000f700, 0x0000cc00,
406 0x00003400, 0x0000a500, 0x0000e500, 0x0000f100,
407 0x00007100, 0x0000d800, 0x00003100, 0x00001500,
408 0x00000400, 0x0000c700, 0x00002300, 0x0000c300,
409 0x00001800, 0x00009600, 0x00000500, 0x00009a00,
410 0x00000700, 0x00001200, 0x00008000, 0x0000e200,
411 0x0000eb00, 0x00002700, 0x0000b200, 0x00007500,
412 0x00000900, 0x00008300, 0x00002c00, 0x00001a00,
413 0x00001b00, 0x00006e00, 0x00005a00, 0x0000a000,
414 0x00005200, 0x00003b00, 0x0000d600, 0x0000b300,
415 0x00002900, 0x0000e300, 0x00002f00, 0x00008400,
416 0x00005300, 0x0000d100, 0x00000000, 0x0000ed00,
417 0x00002000, 0x0000fc00, 0x0000b100, 0x00005b00,
418 0x00006a00, 0x0000cb00, 0x0000be00, 0x00003900,
419 0x00004a00, 0x00004c00, 0x00005800, 0x0000cf00,
420 0x0000d000, 0x0000ef00, 0x0000aa00, 0x0000fb00,
421 0x00004300, 0x00004d00, 0x00003300, 0x00008500,
422 0x00004500, 0x0000f900, 0x00000200, 0x00007f00,
423 0x00005000, 0x00003c00, 0x00009f00, 0x0000a800,
424 0x00005100, 0x0000a300, 0x00004000, 0x00008f00,
425 0x00009200, 0x00009d00, 0x00003800, 0x0000f500,
426 0x0000bc00, 0x0000b600, 0x0000da00, 0x00002100,
427 0x00001000, 0x0000ff00, 0x0000f300, 0x0000d200,
428 0x0000cd00, 0x00000c00, 0x00001300, 0x0000ec00,
429 0x00005f00, 0x00009700, 0x00004400, 0x00001700,
430 0x0000c400, 0x0000a700, 0x00007e00, 0x00003d00,
431 0x00006400, 0x00005d00, 0x00001900, 0x00007300,
432 0x00006000, 0x00008100, 0x00004f00, 0x0000dc00,
433 0x00002200, 0x00002a00, 0x00009000, 0x00008800,
434 0x00004600, 0x0000ee00, 0x0000b800, 0x00001400,
435 0x0000de00, 0x00005e00, 0x00000b00, 0x0000db00,
436 0x0000e000, 0x00003200, 0x00003a00, 0x00000a00,
437 0x00004900, 0x00000600, 0x00002400, 0x00005c00,
438 0x0000c200, 0x0000d300, 0x0000ac00, 0x00006200,
439 0x00009100, 0x00009500, 0x0000e400, 0x00007900,
440 0x0000e700, 0x0000c800, 0x00003700, 0x00006d00,
441 0x00008d00, 0x0000d500, 0x00004e00, 0x0000a900,
442 0x00006c00, 0x00005600, 0x0000f400, 0x0000ea00,
443 0x00006500, 0x00007a00, 0x0000ae00, 0x00000800,
444 0x0000ba00, 0x00007800, 0x00002500, 0x00002e00,
445 0x00001c00, 0x0000a600, 0x0000b400, 0x0000c600,
446 0x0000e800, 0x0000dd00, 0x00007400, 0x00001f00,
447 0x00004b00, 0x0000bd00, 0x00008b00, 0x00008a00,
448 0x00007000, 0x00003e00, 0x0000b500, 0x00006600,
449 0x00004800, 0x00000300, 0x0000f600, 0x00000e00,
450 0x00006100, 0x00003500, 0x00005700, 0x0000b900,
451 0x00008600, 0x0000c100, 0x00001d00, 0x00009e00,
452 0x0000e100, 0x0000f800, 0x00009800, 0x00001100,
453 0x00006900, 0x0000d900, 0x00008e00, 0x00009400,
454 0x00009b00, 0x00001e00, 0x00008700, 0x0000e900,
455 0x0000ce00, 0x00005500, 0x00002800, 0x0000df00,
456 0x00008c00, 0x0000a100, 0x00008900, 0x00000d00,
457 0x0000bf00, 0x0000e600, 0x00004200, 0x00006800,
458 0x00004100, 0x00009900, 0x00002d00, 0x00000f00,
459 0x0000b000, 0x00005400, 0x0000bb00, 0x00001600,
460 }, {
461 0x00630000, 0x007c0000, 0x00770000, 0x007b0000,
462 0x00f20000, 0x006b0000, 0x006f0000, 0x00c50000,
463 0x00300000, 0x00010000, 0x00670000, 0x002b0000,
464 0x00fe0000, 0x00d70000, 0x00ab0000, 0x00760000,
465 0x00ca0000, 0x00820000, 0x00c90000, 0x007d0000,
466 0x00fa0000, 0x00590000, 0x00470000, 0x00f00000,
467 0x00ad0000, 0x00d40000, 0x00a20000, 0x00af0000,
468 0x009c0000, 0x00a40000, 0x00720000, 0x00c00000,
469 0x00b70000, 0x00fd0000, 0x00930000, 0x00260000,
470 0x00360000, 0x003f0000, 0x00f70000, 0x00cc0000,
471 0x00340000, 0x00a50000, 0x00e50000, 0x00f10000,
472 0x00710000, 0x00d80000, 0x00310000, 0x00150000,
473 0x00040000, 0x00c70000, 0x00230000, 0x00c30000,
474 0x00180000, 0x00960000, 0x00050000, 0x009a0000,
475 0x00070000, 0x00120000, 0x00800000, 0x00e20000,
476 0x00eb0000, 0x00270000, 0x00b20000, 0x00750000,
477 0x00090000, 0x00830000, 0x002c0000, 0x001a0000,
478 0x001b0000, 0x006e0000, 0x005a0000, 0x00a00000,
479 0x00520000, 0x003b0000, 0x00d60000, 0x00b30000,
480 0x00290000, 0x00e30000, 0x002f0000, 0x00840000,
481 0x00530000, 0x00d10000, 0x00000000, 0x00ed0000,
482 0x00200000, 0x00fc0000, 0x00b10000, 0x005b0000,
483 0x006a0000, 0x00cb0000, 0x00be0000, 0x00390000,
484 0x004a0000, 0x004c0000, 0x00580000, 0x00cf0000,
485 0x00d00000, 0x00ef0000, 0x00aa0000, 0x00fb0000,
486 0x00430000, 0x004d0000, 0x00330000, 0x00850000,
487 0x00450000, 0x00f90000, 0x00020000, 0x007f0000,
488 0x00500000, 0x003c0000, 0x009f0000, 0x00a80000,
489 0x00510000, 0x00a30000, 0x00400000, 0x008f0000,
490 0x00920000, 0x009d0000, 0x00380000, 0x00f50000,
491 0x00bc0000, 0x00b60000, 0x00da0000, 0x00210000,
492 0x00100000, 0x00ff0000, 0x00f30000, 0x00d20000,
493 0x00cd0000, 0x000c0000, 0x00130000, 0x00ec0000,
494 0x005f0000, 0x00970000, 0x00440000, 0x00170000,
495 0x00c40000, 0x00a70000, 0x007e0000, 0x003d0000,
496 0x00640000, 0x005d0000, 0x00190000, 0x00730000,
497 0x00600000, 0x00810000, 0x004f0000, 0x00dc0000,
498 0x00220000, 0x002a0000, 0x00900000, 0x00880000,
499 0x00460000, 0x00ee0000, 0x00b80000, 0x00140000,
500 0x00de0000, 0x005e0000, 0x000b0000, 0x00db0000,
501 0x00e00000, 0x00320000, 0x003a0000, 0x000a0000,
502 0x00490000, 0x00060000, 0x00240000, 0x005c0000,
503 0x00c20000, 0x00d30000, 0x00ac0000, 0x00620000,
504 0x00910000, 0x00950000, 0x00e40000, 0x00790000,
505 0x00e70000, 0x00c80000, 0x00370000, 0x006d0000,
506 0x008d0000, 0x00d50000, 0x004e0000, 0x00a90000,
507 0x006c0000, 0x00560000, 0x00f40000, 0x00ea0000,
508 0x00650000, 0x007a0000, 0x00ae0000, 0x00080000,
509 0x00ba0000, 0x00780000, 0x00250000, 0x002e0000,
510 0x001c0000, 0x00a60000, 0x00b40000, 0x00c60000,
511 0x00e80000, 0x00dd0000, 0x00740000, 0x001f0000,
512 0x004b0000, 0x00bd0000, 0x008b0000, 0x008a0000,
513 0x00700000, 0x003e0000, 0x00b50000, 0x00660000,
514 0x00480000, 0x00030000, 0x00f60000, 0x000e0000,
515 0x00610000, 0x00350000, 0x00570000, 0x00b90000,
516 0x00860000, 0x00c10000, 0x001d0000, 0x009e0000,
517 0x00e10000, 0x00f80000, 0x00980000, 0x00110000,
518 0x00690000, 0x00d90000, 0x008e0000, 0x00940000,
519 0x009b0000, 0x001e0000, 0x00870000, 0x00e90000,
520 0x00ce0000, 0x00550000, 0x00280000, 0x00df0000,
521 0x008c0000, 0x00a10000, 0x00890000, 0x000d0000,
522 0x00bf0000, 0x00e60000, 0x00420000, 0x00680000,
523 0x00410000, 0x00990000, 0x002d0000, 0x000f0000,
524 0x00b00000, 0x00540000, 0x00bb0000, 0x00160000,
525 }, {
526 0x63000000, 0x7c000000, 0x77000000, 0x7b000000,
527 0xf2000000, 0x6b000000, 0x6f000000, 0xc5000000,
528 0x30000000, 0x01000000, 0x67000000, 0x2b000000,
529 0xfe000000, 0xd7000000, 0xab000000, 0x76000000,
530 0xca000000, 0x82000000, 0xc9000000, 0x7d000000,
531 0xfa000000, 0x59000000, 0x47000000, 0xf0000000,
532 0xad000000, 0xd4000000, 0xa2000000, 0xaf000000,
533 0x9c000000, 0xa4000000, 0x72000000, 0xc0000000,
534 0xb7000000, 0xfd000000, 0x93000000, 0x26000000,
535 0x36000000, 0x3f000000, 0xf7000000, 0xcc000000,
536 0x34000000, 0xa5000000, 0xe5000000, 0xf1000000,
537 0x71000000, 0xd8000000, 0x31000000, 0x15000000,
538 0x04000000, 0xc7000000, 0x23000000, 0xc3000000,
539 0x18000000, 0x96000000, 0x05000000, 0x9a000000,
540 0x07000000, 0x12000000, 0x80000000, 0xe2000000,
541 0xeb000000, 0x27000000, 0xb2000000, 0x75000000,
542 0x09000000, 0x83000000, 0x2c000000, 0x1a000000,
543 0x1b000000, 0x6e000000, 0x5a000000, 0xa0000000,
544 0x52000000, 0x3b000000, 0xd6000000, 0xb3000000,
545 0x29000000, 0xe3000000, 0x2f000000, 0x84000000,
546 0x53000000, 0xd1000000, 0x00000000, 0xed000000,
547 0x20000000, 0xfc000000, 0xb1000000, 0x5b000000,
548 0x6a000000, 0xcb000000, 0xbe000000, 0x39000000,
549 0x4a000000, 0x4c000000, 0x58000000, 0xcf000000,
550 0xd0000000, 0xef000000, 0xaa000000, 0xfb000000,
551 0x43000000, 0x4d000000, 0x33000000, 0x85000000,
552 0x45000000, 0xf9000000, 0x02000000, 0x7f000000,
553 0x50000000, 0x3c000000, 0x9f000000, 0xa8000000,
554 0x51000000, 0xa3000000, 0x40000000, 0x8f000000,
555 0x92000000, 0x9d000000, 0x38000000, 0xf5000000,
556 0xbc000000, 0xb6000000, 0xda000000, 0x21000000,
557 0x10000000, 0xff000000, 0xf3000000, 0xd2000000,
558 0xcd000000, 0x0c000000, 0x13000000, 0xec000000,
559 0x5f000000, 0x97000000, 0x44000000, 0x17000000,
560 0xc4000000, 0xa7000000, 0x7e000000, 0x3d000000,
561 0x64000000, 0x5d000000, 0x19000000, 0x73000000,
562 0x60000000, 0x81000000, 0x4f000000, 0xdc000000,
563 0x22000000, 0x2a000000, 0x90000000, 0x88000000,
564 0x46000000, 0xee000000, 0xb8000000, 0x14000000,
565 0xde000000, 0x5e000000, 0x0b000000, 0xdb000000,
566 0xe0000000, 0x32000000, 0x3a000000, 0x0a000000,
567 0x49000000, 0x06000000, 0x24000000, 0x5c000000,
568 0xc2000000, 0xd3000000, 0xac000000, 0x62000000,
569 0x91000000, 0x95000000, 0xe4000000, 0x79000000,
570 0xe7000000, 0xc8000000, 0x37000000, 0x6d000000,
571 0x8d000000, 0xd5000000, 0x4e000000, 0xa9000000,
572 0x6c000000, 0x56000000, 0xf4000000, 0xea000000,
573 0x65000000, 0x7a000000, 0xae000000, 0x08000000,
574 0xba000000, 0x78000000, 0x25000000, 0x2e000000,
575 0x1c000000, 0xa6000000, 0xb4000000, 0xc6000000,
576 0xe8000000, 0xdd000000, 0x74000000, 0x1f000000,
577 0x4b000000, 0xbd000000, 0x8b000000, 0x8a000000,
578 0x70000000, 0x3e000000, 0xb5000000, 0x66000000,
579 0x48000000, 0x03000000, 0xf6000000, 0x0e000000,
580 0x61000000, 0x35000000, 0x57000000, 0xb9000000,
581 0x86000000, 0xc1000000, 0x1d000000, 0x9e000000,
582 0xe1000000, 0xf8000000, 0x98000000, 0x11000000,
583 0x69000000, 0xd9000000, 0x8e000000, 0x94000000,
584 0x9b000000, 0x1e000000, 0x87000000, 0xe9000000,
585 0xce000000, 0x55000000, 0x28000000, 0xdf000000,
586 0x8c000000, 0xa1000000, 0x89000000, 0x0d000000,
587 0xbf000000, 0xe6000000, 0x42000000, 0x68000000,
588 0x41000000, 0x99000000, 0x2d000000, 0x0f000000,
589 0xb0000000, 0x54000000, 0xbb000000, 0x16000000,
112 } 590 }
591};
113 592
114 for (i = 0; i < 256; ++i) { 593const u32 crypto_it_tab[4][256] = {
115 p = (i ? pow_tab[255 - log_tab[i]] : 0); 594 {
116 q = ((p >> 7) | (p << 1)) ^ ((p >> 6) | (p << 2)); 595 0x50a7f451, 0x5365417e, 0xc3a4171a, 0x965e273a,
117 p ^= 0x63 ^ q ^ ((q >> 6) | (q << 2)); 596 0xcb6bab3b, 0xf1459d1f, 0xab58faac, 0x9303e34b,
118 sbx_tab[i] = p; 597 0x55fa3020, 0xf66d76ad, 0x9176cc88, 0x254c02f5,
119 isb_tab[p] = (u8) i; 598 0xfcd7e54f, 0xd7cb2ac5, 0x80443526, 0x8fa362b5,
599 0x495ab1de, 0x671bba25, 0x980eea45, 0xe1c0fe5d,
600 0x02752fc3, 0x12f04c81, 0xa397468d, 0xc6f9d36b,
601 0xe75f8f03, 0x959c9215, 0xeb7a6dbf, 0xda595295,
602 0x2d83bed4, 0xd3217458, 0x2969e049, 0x44c8c98e,
603 0x6a89c275, 0x78798ef4, 0x6b3e5899, 0xdd71b927,
604 0xb64fe1be, 0x17ad88f0, 0x66ac20c9, 0xb43ace7d,
605 0x184adf63, 0x82311ae5, 0x60335197, 0x457f5362,
606 0xe07764b1, 0x84ae6bbb, 0x1ca081fe, 0x942b08f9,
607 0x58684870, 0x19fd458f, 0x876cde94, 0xb7f87b52,
608 0x23d373ab, 0xe2024b72, 0x578f1fe3, 0x2aab5566,
609 0x0728ebb2, 0x03c2b52f, 0x9a7bc586, 0xa50837d3,
610 0xf2872830, 0xb2a5bf23, 0xba6a0302, 0x5c8216ed,
611 0x2b1ccf8a, 0x92b479a7, 0xf0f207f3, 0xa1e2694e,
612 0xcdf4da65, 0xd5be0506, 0x1f6234d1, 0x8afea6c4,
613 0x9d532e34, 0xa055f3a2, 0x32e18a05, 0x75ebf6a4,
614 0x39ec830b, 0xaaef6040, 0x069f715e, 0x51106ebd,
615 0xf98a213e, 0x3d06dd96, 0xae053edd, 0x46bde64d,
616 0xb58d5491, 0x055dc471, 0x6fd40604, 0xff155060,
617 0x24fb9819, 0x97e9bdd6, 0xcc434089, 0x779ed967,
618 0xbd42e8b0, 0x888b8907, 0x385b19e7, 0xdbeec879,
619 0x470a7ca1, 0xe90f427c, 0xc91e84f8, 0x00000000,
620 0x83868009, 0x48ed2b32, 0xac70111e, 0x4e725a6c,
621 0xfbff0efd, 0x5638850f, 0x1ed5ae3d, 0x27392d36,
622 0x64d90f0a, 0x21a65c68, 0xd1545b9b, 0x3a2e3624,
623 0xb1670a0c, 0x0fe75793, 0xd296eeb4, 0x9e919b1b,
624 0x4fc5c080, 0xa220dc61, 0x694b775a, 0x161a121c,
625 0x0aba93e2, 0xe52aa0c0, 0x43e0223c, 0x1d171b12,
626 0x0b0d090e, 0xadc78bf2, 0xb9a8b62d, 0xc8a91e14,
627 0x8519f157, 0x4c0775af, 0xbbdd99ee, 0xfd607fa3,
628 0x9f2601f7, 0xbcf5725c, 0xc53b6644, 0x347efb5b,
629 0x7629438b, 0xdcc623cb, 0x68fcedb6, 0x63f1e4b8,
630 0xcadc31d7, 0x10856342, 0x40229713, 0x2011c684,
631 0x7d244a85, 0xf83dbbd2, 0x1132f9ae, 0x6da129c7,
632 0x4b2f9e1d, 0xf330b2dc, 0xec52860d, 0xd0e3c177,
633 0x6c16b32b, 0x99b970a9, 0xfa489411, 0x2264e947,
634 0xc48cfca8, 0x1a3ff0a0, 0xd82c7d56, 0xef903322,
635 0xc74e4987, 0xc1d138d9, 0xfea2ca8c, 0x360bd498,
636 0xcf81f5a6, 0x28de7aa5, 0x268eb7da, 0xa4bfad3f,
637 0xe49d3a2c, 0x0d927850, 0x9bcc5f6a, 0x62467e54,
638 0xc2138df6, 0xe8b8d890, 0x5ef7392e, 0xf5afc382,
639 0xbe805d9f, 0x7c93d069, 0xa92dd56f, 0xb31225cf,
640 0x3b99acc8, 0xa77d1810, 0x6e639ce8, 0x7bbb3bdb,
641 0x097826cd, 0xf418596e, 0x01b79aec, 0xa89a4f83,
642 0x656e95e6, 0x7ee6ffaa, 0x08cfbc21, 0xe6e815ef,
643 0xd99be7ba, 0xce366f4a, 0xd4099fea, 0xd67cb029,
644 0xafb2a431, 0x31233f2a, 0x3094a5c6, 0xc066a235,
645 0x37bc4e74, 0xa6ca82fc, 0xb0d090e0, 0x15d8a733,
646 0x4a9804f1, 0xf7daec41, 0x0e50cd7f, 0x2ff69117,
647 0x8dd64d76, 0x4db0ef43, 0x544daacc, 0xdf0496e4,
648 0xe3b5d19e, 0x1b886a4c, 0xb81f2cc1, 0x7f516546,
649 0x04ea5e9d, 0x5d358c01, 0x737487fa, 0x2e410bfb,
650 0x5a1d67b3, 0x52d2db92, 0x335610e9, 0x1347d66d,
651 0x8c61d79a, 0x7a0ca137, 0x8e14f859, 0x893c13eb,
652 0xee27a9ce, 0x35c961b7, 0xede51ce1, 0x3cb1477a,
653 0x59dfd29c, 0x3f73f255, 0x79ce1418, 0xbf37c773,
654 0xeacdf753, 0x5baafd5f, 0x146f3ddf, 0x86db4478,
655 0x81f3afca, 0x3ec468b9, 0x2c342438, 0x5f40a3c2,
656 0x72c31d16, 0x0c25e2bc, 0x8b493c28, 0x41950dff,
657 0x7101a839, 0xdeb30c08, 0x9ce4b4d8, 0x90c15664,
658 0x6184cb7b, 0x70b632d5, 0x745c6c48, 0x4257b8d0,
659 }, {
660 0xa7f45150, 0x65417e53, 0xa4171ac3, 0x5e273a96,
661 0x6bab3bcb, 0x459d1ff1, 0x58faacab, 0x03e34b93,
662 0xfa302055, 0x6d76adf6, 0x76cc8891, 0x4c02f525,
663 0xd7e54ffc, 0xcb2ac5d7, 0x44352680, 0xa362b58f,
664 0x5ab1de49, 0x1bba2567, 0x0eea4598, 0xc0fe5de1,
665 0x752fc302, 0xf04c8112, 0x97468da3, 0xf9d36bc6,
666 0x5f8f03e7, 0x9c921595, 0x7a6dbfeb, 0x595295da,
667 0x83bed42d, 0x217458d3, 0x69e04929, 0xc8c98e44,
668 0x89c2756a, 0x798ef478, 0x3e58996b, 0x71b927dd,
669 0x4fe1beb6, 0xad88f017, 0xac20c966, 0x3ace7db4,
670 0x4adf6318, 0x311ae582, 0x33519760, 0x7f536245,
671 0x7764b1e0, 0xae6bbb84, 0xa081fe1c, 0x2b08f994,
672 0x68487058, 0xfd458f19, 0x6cde9487, 0xf87b52b7,
673 0xd373ab23, 0x024b72e2, 0x8f1fe357, 0xab55662a,
674 0x28ebb207, 0xc2b52f03, 0x7bc5869a, 0x0837d3a5,
675 0x872830f2, 0xa5bf23b2, 0x6a0302ba, 0x8216ed5c,
676 0x1ccf8a2b, 0xb479a792, 0xf207f3f0, 0xe2694ea1,
677 0xf4da65cd, 0xbe0506d5, 0x6234d11f, 0xfea6c48a,
678 0x532e349d, 0x55f3a2a0, 0xe18a0532, 0xebf6a475,
679 0xec830b39, 0xef6040aa, 0x9f715e06, 0x106ebd51,
680 0x8a213ef9, 0x06dd963d, 0x053eddae, 0xbde64d46,
681 0x8d5491b5, 0x5dc47105, 0xd406046f, 0x155060ff,
682 0xfb981924, 0xe9bdd697, 0x434089cc, 0x9ed96777,
683 0x42e8b0bd, 0x8b890788, 0x5b19e738, 0xeec879db,
684 0x0a7ca147, 0x0f427ce9, 0x1e84f8c9, 0x00000000,
685 0x86800983, 0xed2b3248, 0x70111eac, 0x725a6c4e,
686 0xff0efdfb, 0x38850f56, 0xd5ae3d1e, 0x392d3627,
687 0xd90f0a64, 0xa65c6821, 0x545b9bd1, 0x2e36243a,
688 0x670a0cb1, 0xe757930f, 0x96eeb4d2, 0x919b1b9e,
689 0xc5c0804f, 0x20dc61a2, 0x4b775a69, 0x1a121c16,
690 0xba93e20a, 0x2aa0c0e5, 0xe0223c43, 0x171b121d,
691 0x0d090e0b, 0xc78bf2ad, 0xa8b62db9, 0xa91e14c8,
692 0x19f15785, 0x0775af4c, 0xdd99eebb, 0x607fa3fd,
693 0x2601f79f, 0xf5725cbc, 0x3b6644c5, 0x7efb5b34,
694 0x29438b76, 0xc623cbdc, 0xfcedb668, 0xf1e4b863,
695 0xdc31d7ca, 0x85634210, 0x22971340, 0x11c68420,
696 0x244a857d, 0x3dbbd2f8, 0x32f9ae11, 0xa129c76d,
697 0x2f9e1d4b, 0x30b2dcf3, 0x52860dec, 0xe3c177d0,
698 0x16b32b6c, 0xb970a999, 0x489411fa, 0x64e94722,
699 0x8cfca8c4, 0x3ff0a01a, 0x2c7d56d8, 0x903322ef,
700 0x4e4987c7, 0xd138d9c1, 0xa2ca8cfe, 0x0bd49836,
701 0x81f5a6cf, 0xde7aa528, 0x8eb7da26, 0xbfad3fa4,
702 0x9d3a2ce4, 0x9278500d, 0xcc5f6a9b, 0x467e5462,
703 0x138df6c2, 0xb8d890e8, 0xf7392e5e, 0xafc382f5,
704 0x805d9fbe, 0x93d0697c, 0x2dd56fa9, 0x1225cfb3,
705 0x99acc83b, 0x7d1810a7, 0x639ce86e, 0xbb3bdb7b,
706 0x7826cd09, 0x18596ef4, 0xb79aec01, 0x9a4f83a8,
707 0x6e95e665, 0xe6ffaa7e, 0xcfbc2108, 0xe815efe6,
708 0x9be7bad9, 0x366f4ace, 0x099fead4, 0x7cb029d6,
709 0xb2a431af, 0x233f2a31, 0x94a5c630, 0x66a235c0,
710 0xbc4e7437, 0xca82fca6, 0xd090e0b0, 0xd8a73315,
711 0x9804f14a, 0xdaec41f7, 0x50cd7f0e, 0xf691172f,
712 0xd64d768d, 0xb0ef434d, 0x4daacc54, 0x0496e4df,
713 0xb5d19ee3, 0x886a4c1b, 0x1f2cc1b8, 0x5165467f,
714 0xea5e9d04, 0x358c015d, 0x7487fa73, 0x410bfb2e,
715 0x1d67b35a, 0xd2db9252, 0x5610e933, 0x47d66d13,
716 0x61d79a8c, 0x0ca1377a, 0x14f8598e, 0x3c13eb89,
717 0x27a9ceee, 0xc961b735, 0xe51ce1ed, 0xb1477a3c,
718 0xdfd29c59, 0x73f2553f, 0xce141879, 0x37c773bf,
719 0xcdf753ea, 0xaafd5f5b, 0x6f3ddf14, 0xdb447886,
720 0xf3afca81, 0xc468b93e, 0x3424382c, 0x40a3c25f,
721 0xc31d1672, 0x25e2bc0c, 0x493c288b, 0x950dff41,
722 0x01a83971, 0xb30c08de, 0xe4b4d89c, 0xc1566490,
723 0x84cb7b61, 0xb632d570, 0x5c6c4874, 0x57b8d042,
724 }, {
725 0xf45150a7, 0x417e5365, 0x171ac3a4, 0x273a965e,
726 0xab3bcb6b, 0x9d1ff145, 0xfaacab58, 0xe34b9303,
727 0x302055fa, 0x76adf66d, 0xcc889176, 0x02f5254c,
728 0xe54ffcd7, 0x2ac5d7cb, 0x35268044, 0x62b58fa3,
729 0xb1de495a, 0xba25671b, 0xea45980e, 0xfe5de1c0,
730 0x2fc30275, 0x4c8112f0, 0x468da397, 0xd36bc6f9,
731 0x8f03e75f, 0x9215959c, 0x6dbfeb7a, 0x5295da59,
732 0xbed42d83, 0x7458d321, 0xe0492969, 0xc98e44c8,
733 0xc2756a89, 0x8ef47879, 0x58996b3e, 0xb927dd71,
734 0xe1beb64f, 0x88f017ad, 0x20c966ac, 0xce7db43a,
735 0xdf63184a, 0x1ae58231, 0x51976033, 0x5362457f,
736 0x64b1e077, 0x6bbb84ae, 0x81fe1ca0, 0x08f9942b,
737 0x48705868, 0x458f19fd, 0xde94876c, 0x7b52b7f8,
738 0x73ab23d3, 0x4b72e202, 0x1fe3578f, 0x55662aab,
739 0xebb20728, 0xb52f03c2, 0xc5869a7b, 0x37d3a508,
740 0x2830f287, 0xbf23b2a5, 0x0302ba6a, 0x16ed5c82,
741 0xcf8a2b1c, 0x79a792b4, 0x07f3f0f2, 0x694ea1e2,
742 0xda65cdf4, 0x0506d5be, 0x34d11f62, 0xa6c48afe,
743 0x2e349d53, 0xf3a2a055, 0x8a0532e1, 0xf6a475eb,
744 0x830b39ec, 0x6040aaef, 0x715e069f, 0x6ebd5110,
745 0x213ef98a, 0xdd963d06, 0x3eddae05, 0xe64d46bd,
746 0x5491b58d, 0xc471055d, 0x06046fd4, 0x5060ff15,
747 0x981924fb, 0xbdd697e9, 0x4089cc43, 0xd967779e,
748 0xe8b0bd42, 0x8907888b, 0x19e7385b, 0xc879dbee,
749 0x7ca1470a, 0x427ce90f, 0x84f8c91e, 0x00000000,
750 0x80098386, 0x2b3248ed, 0x111eac70, 0x5a6c4e72,
751 0x0efdfbff, 0x850f5638, 0xae3d1ed5, 0x2d362739,
752 0x0f0a64d9, 0x5c6821a6, 0x5b9bd154, 0x36243a2e,
753 0x0a0cb167, 0x57930fe7, 0xeeb4d296, 0x9b1b9e91,
754 0xc0804fc5, 0xdc61a220, 0x775a694b, 0x121c161a,
755 0x93e20aba, 0xa0c0e52a, 0x223c43e0, 0x1b121d17,
756 0x090e0b0d, 0x8bf2adc7, 0xb62db9a8, 0x1e14c8a9,
757 0xf1578519, 0x75af4c07, 0x99eebbdd, 0x7fa3fd60,
758 0x01f79f26, 0x725cbcf5, 0x6644c53b, 0xfb5b347e,
759 0x438b7629, 0x23cbdcc6, 0xedb668fc, 0xe4b863f1,
760 0x31d7cadc, 0x63421085, 0x97134022, 0xc6842011,
761 0x4a857d24, 0xbbd2f83d, 0xf9ae1132, 0x29c76da1,
762 0x9e1d4b2f, 0xb2dcf330, 0x860dec52, 0xc177d0e3,
763 0xb32b6c16, 0x70a999b9, 0x9411fa48, 0xe9472264,
764 0xfca8c48c, 0xf0a01a3f, 0x7d56d82c, 0x3322ef90,
765 0x4987c74e, 0x38d9c1d1, 0xca8cfea2, 0xd498360b,
766 0xf5a6cf81, 0x7aa528de, 0xb7da268e, 0xad3fa4bf,
767 0x3a2ce49d, 0x78500d92, 0x5f6a9bcc, 0x7e546246,
768 0x8df6c213, 0xd890e8b8, 0x392e5ef7, 0xc382f5af,
769 0x5d9fbe80, 0xd0697c93, 0xd56fa92d, 0x25cfb312,
770 0xacc83b99, 0x1810a77d, 0x9ce86e63, 0x3bdb7bbb,
771 0x26cd0978, 0x596ef418, 0x9aec01b7, 0x4f83a89a,
772 0x95e6656e, 0xffaa7ee6, 0xbc2108cf, 0x15efe6e8,
773 0xe7bad99b, 0x6f4ace36, 0x9fead409, 0xb029d67c,
774 0xa431afb2, 0x3f2a3123, 0xa5c63094, 0xa235c066,
775 0x4e7437bc, 0x82fca6ca, 0x90e0b0d0, 0xa73315d8,
776 0x04f14a98, 0xec41f7da, 0xcd7f0e50, 0x91172ff6,
777 0x4d768dd6, 0xef434db0, 0xaacc544d, 0x96e4df04,
778 0xd19ee3b5, 0x6a4c1b88, 0x2cc1b81f, 0x65467f51,
779 0x5e9d04ea, 0x8c015d35, 0x87fa7374, 0x0bfb2e41,
780 0x67b35a1d, 0xdb9252d2, 0x10e93356, 0xd66d1347,
781 0xd79a8c61, 0xa1377a0c, 0xf8598e14, 0x13eb893c,
782 0xa9ceee27, 0x61b735c9, 0x1ce1ede5, 0x477a3cb1,
783 0xd29c59df, 0xf2553f73, 0x141879ce, 0xc773bf37,
784 0xf753eacd, 0xfd5f5baa, 0x3ddf146f, 0x447886db,
785 0xafca81f3, 0x68b93ec4, 0x24382c34, 0xa3c25f40,
786 0x1d1672c3, 0xe2bc0c25, 0x3c288b49, 0x0dff4195,
787 0xa8397101, 0x0c08deb3, 0xb4d89ce4, 0x566490c1,
788 0xcb7b6184, 0x32d570b6, 0x6c48745c, 0xb8d04257,
789 }, {
790 0x5150a7f4, 0x7e536541, 0x1ac3a417, 0x3a965e27,
791 0x3bcb6bab, 0x1ff1459d, 0xacab58fa, 0x4b9303e3,
792 0x2055fa30, 0xadf66d76, 0x889176cc, 0xf5254c02,
793 0x4ffcd7e5, 0xc5d7cb2a, 0x26804435, 0xb58fa362,
794 0xde495ab1, 0x25671bba, 0x45980eea, 0x5de1c0fe,
795 0xc302752f, 0x8112f04c, 0x8da39746, 0x6bc6f9d3,
796 0x03e75f8f, 0x15959c92, 0xbfeb7a6d, 0x95da5952,
797 0xd42d83be, 0x58d32174, 0x492969e0, 0x8e44c8c9,
798 0x756a89c2, 0xf478798e, 0x996b3e58, 0x27dd71b9,
799 0xbeb64fe1, 0xf017ad88, 0xc966ac20, 0x7db43ace,
800 0x63184adf, 0xe582311a, 0x97603351, 0x62457f53,
801 0xb1e07764, 0xbb84ae6b, 0xfe1ca081, 0xf9942b08,
802 0x70586848, 0x8f19fd45, 0x94876cde, 0x52b7f87b,
803 0xab23d373, 0x72e2024b, 0xe3578f1f, 0x662aab55,
804 0xb20728eb, 0x2f03c2b5, 0x869a7bc5, 0xd3a50837,
805 0x30f28728, 0x23b2a5bf, 0x02ba6a03, 0xed5c8216,
806 0x8a2b1ccf, 0xa792b479, 0xf3f0f207, 0x4ea1e269,
807 0x65cdf4da, 0x06d5be05, 0xd11f6234, 0xc48afea6,
808 0x349d532e, 0xa2a055f3, 0x0532e18a, 0xa475ebf6,
809 0x0b39ec83, 0x40aaef60, 0x5e069f71, 0xbd51106e,
810 0x3ef98a21, 0x963d06dd, 0xddae053e, 0x4d46bde6,
811 0x91b58d54, 0x71055dc4, 0x046fd406, 0x60ff1550,
812 0x1924fb98, 0xd697e9bd, 0x89cc4340, 0x67779ed9,
813 0xb0bd42e8, 0x07888b89, 0xe7385b19, 0x79dbeec8,
814 0xa1470a7c, 0x7ce90f42, 0xf8c91e84, 0x00000000,
815 0x09838680, 0x3248ed2b, 0x1eac7011, 0x6c4e725a,
816 0xfdfbff0e, 0x0f563885, 0x3d1ed5ae, 0x3627392d,
817 0x0a64d90f, 0x6821a65c, 0x9bd1545b, 0x243a2e36,
818 0x0cb1670a, 0x930fe757, 0xb4d296ee, 0x1b9e919b,
819 0x804fc5c0, 0x61a220dc, 0x5a694b77, 0x1c161a12,
820 0xe20aba93, 0xc0e52aa0, 0x3c43e022, 0x121d171b,
821 0x0e0b0d09, 0xf2adc78b, 0x2db9a8b6, 0x14c8a91e,
822 0x578519f1, 0xaf4c0775, 0xeebbdd99, 0xa3fd607f,
823 0xf79f2601, 0x5cbcf572, 0x44c53b66, 0x5b347efb,
824 0x8b762943, 0xcbdcc623, 0xb668fced, 0xb863f1e4,
825 0xd7cadc31, 0x42108563, 0x13402297, 0x842011c6,
826 0x857d244a, 0xd2f83dbb, 0xae1132f9, 0xc76da129,
827 0x1d4b2f9e, 0xdcf330b2, 0x0dec5286, 0x77d0e3c1,
828 0x2b6c16b3, 0xa999b970, 0x11fa4894, 0x472264e9,
829 0xa8c48cfc, 0xa01a3ff0, 0x56d82c7d, 0x22ef9033,
830 0x87c74e49, 0xd9c1d138, 0x8cfea2ca, 0x98360bd4,
831 0xa6cf81f5, 0xa528de7a, 0xda268eb7, 0x3fa4bfad,
832 0x2ce49d3a, 0x500d9278, 0x6a9bcc5f, 0x5462467e,
833 0xf6c2138d, 0x90e8b8d8, 0x2e5ef739, 0x82f5afc3,
834 0x9fbe805d, 0x697c93d0, 0x6fa92dd5, 0xcfb31225,
835 0xc83b99ac, 0x10a77d18, 0xe86e639c, 0xdb7bbb3b,
836 0xcd097826, 0x6ef41859, 0xec01b79a, 0x83a89a4f,
837 0xe6656e95, 0xaa7ee6ff, 0x2108cfbc, 0xefe6e815,
838 0xbad99be7, 0x4ace366f, 0xead4099f, 0x29d67cb0,
839 0x31afb2a4, 0x2a31233f, 0xc63094a5, 0x35c066a2,
840 0x7437bc4e, 0xfca6ca82, 0xe0b0d090, 0x3315d8a7,
841 0xf14a9804, 0x41f7daec, 0x7f0e50cd, 0x172ff691,
842 0x768dd64d, 0x434db0ef, 0xcc544daa, 0xe4df0496,
843 0x9ee3b5d1, 0x4c1b886a, 0xc1b81f2c, 0x467f5165,
844 0x9d04ea5e, 0x015d358c, 0xfa737487, 0xfb2e410b,
845 0xb35a1d67, 0x9252d2db, 0xe9335610, 0x6d1347d6,
846 0x9a8c61d7, 0x377a0ca1, 0x598e14f8, 0xeb893c13,
847 0xceee27a9, 0xb735c961, 0xe1ede51c, 0x7a3cb147,
848 0x9c59dfd2, 0x553f73f2, 0x1879ce14, 0x73bf37c7,
849 0x53eacdf7, 0x5f5baafd, 0xdf146f3d, 0x7886db44,
850 0xca81f3af, 0xb93ec468, 0x382c3424, 0xc25f40a3,
851 0x1672c31d, 0xbc0c25e2, 0x288b493c, 0xff41950d,
852 0x397101a8, 0x08deb30c, 0xd89ce4b4, 0x6490c156,
853 0x7b6184cb, 0xd570b632, 0x48745c6c, 0xd04257b8,
120 } 854 }
855};
121 856
122 for (i = 0; i < 256; ++i) { 857const u32 crypto_il_tab[4][256] = {
123 p = sbx_tab[i]; 858 {
124 859 0x00000052, 0x00000009, 0x0000006a, 0x000000d5,
125 t = p; 860 0x00000030, 0x00000036, 0x000000a5, 0x00000038,
126 crypto_fl_tab[0][i] = t; 861 0x000000bf, 0x00000040, 0x000000a3, 0x0000009e,
127 crypto_fl_tab[1][i] = rol32(t, 8); 862 0x00000081, 0x000000f3, 0x000000d7, 0x000000fb,
128 crypto_fl_tab[2][i] = rol32(t, 16); 863 0x0000007c, 0x000000e3, 0x00000039, 0x00000082,
129 crypto_fl_tab[3][i] = rol32(t, 24); 864 0x0000009b, 0x0000002f, 0x000000ff, 0x00000087,
130 865 0x00000034, 0x0000008e, 0x00000043, 0x00000044,
131 t = ((u32) ff_mult(2, p)) | 866 0x000000c4, 0x000000de, 0x000000e9, 0x000000cb,
132 ((u32) p << 8) | 867 0x00000054, 0x0000007b, 0x00000094, 0x00000032,
133 ((u32) p << 16) | ((u32) ff_mult(3, p) << 24); 868 0x000000a6, 0x000000c2, 0x00000023, 0x0000003d,
134 869 0x000000ee, 0x0000004c, 0x00000095, 0x0000000b,
135 crypto_ft_tab[0][i] = t; 870 0x00000042, 0x000000fa, 0x000000c3, 0x0000004e,
136 crypto_ft_tab[1][i] = rol32(t, 8); 871 0x00000008, 0x0000002e, 0x000000a1, 0x00000066,
137 crypto_ft_tab[2][i] = rol32(t, 16); 872 0x00000028, 0x000000d9, 0x00000024, 0x000000b2,
138 crypto_ft_tab[3][i] = rol32(t, 24); 873 0x00000076, 0x0000005b, 0x000000a2, 0x00000049,
139 874 0x0000006d, 0x0000008b, 0x000000d1, 0x00000025,
140 p = isb_tab[i]; 875 0x00000072, 0x000000f8, 0x000000f6, 0x00000064,
141 876 0x00000086, 0x00000068, 0x00000098, 0x00000016,
142 t = p; 877 0x000000d4, 0x000000a4, 0x0000005c, 0x000000cc,
143 crypto_il_tab[0][i] = t; 878 0x0000005d, 0x00000065, 0x000000b6, 0x00000092,
144 crypto_il_tab[1][i] = rol32(t, 8); 879 0x0000006c, 0x00000070, 0x00000048, 0x00000050,
145 crypto_il_tab[2][i] = rol32(t, 16); 880 0x000000fd, 0x000000ed, 0x000000b9, 0x000000da,
146 crypto_il_tab[3][i] = rol32(t, 24); 881 0x0000005e, 0x00000015, 0x00000046, 0x00000057,
147 882 0x000000a7, 0x0000008d, 0x0000009d, 0x00000084,
148 t = ((u32) ff_mult(14, p)) | 883 0x00000090, 0x000000d8, 0x000000ab, 0x00000000,
149 ((u32) ff_mult(9, p) << 8) | 884 0x0000008c, 0x000000bc, 0x000000d3, 0x0000000a,
150 ((u32) ff_mult(13, p) << 16) | 885 0x000000f7, 0x000000e4, 0x00000058, 0x00000005,
151 ((u32) ff_mult(11, p) << 24); 886 0x000000b8, 0x000000b3, 0x00000045, 0x00000006,
152 887 0x000000d0, 0x0000002c, 0x0000001e, 0x0000008f,
153 crypto_it_tab[0][i] = t; 888 0x000000ca, 0x0000003f, 0x0000000f, 0x00000002,
154 crypto_it_tab[1][i] = rol32(t, 8); 889 0x000000c1, 0x000000af, 0x000000bd, 0x00000003,
155 crypto_it_tab[2][i] = rol32(t, 16); 890 0x00000001, 0x00000013, 0x0000008a, 0x0000006b,
156 crypto_it_tab[3][i] = rol32(t, 24); 891 0x0000003a, 0x00000091, 0x00000011, 0x00000041,
892 0x0000004f, 0x00000067, 0x000000dc, 0x000000ea,
893 0x00000097, 0x000000f2, 0x000000cf, 0x000000ce,
894 0x000000f0, 0x000000b4, 0x000000e6, 0x00000073,
895 0x00000096, 0x000000ac, 0x00000074, 0x00000022,
896 0x000000e7, 0x000000ad, 0x00000035, 0x00000085,
897 0x000000e2, 0x000000f9, 0x00000037, 0x000000e8,
898 0x0000001c, 0x00000075, 0x000000df, 0x0000006e,
899 0x00000047, 0x000000f1, 0x0000001a, 0x00000071,
900 0x0000001d, 0x00000029, 0x000000c5, 0x00000089,
901 0x0000006f, 0x000000b7, 0x00000062, 0x0000000e,
902 0x000000aa, 0x00000018, 0x000000be, 0x0000001b,
903 0x000000fc, 0x00000056, 0x0000003e, 0x0000004b,
904 0x000000c6, 0x000000d2, 0x00000079, 0x00000020,
905 0x0000009a, 0x000000db, 0x000000c0, 0x000000fe,
906 0x00000078, 0x000000cd, 0x0000005a, 0x000000f4,
907 0x0000001f, 0x000000dd, 0x000000a8, 0x00000033,
908 0x00000088, 0x00000007, 0x000000c7, 0x00000031,
909 0x000000b1, 0x00000012, 0x00000010, 0x00000059,
910 0x00000027, 0x00000080, 0x000000ec, 0x0000005f,
911 0x00000060, 0x00000051, 0x0000007f, 0x000000a9,
912 0x00000019, 0x000000b5, 0x0000004a, 0x0000000d,
913 0x0000002d, 0x000000e5, 0x0000007a, 0x0000009f,
914 0x00000093, 0x000000c9, 0x0000009c, 0x000000ef,
915 0x000000a0, 0x000000e0, 0x0000003b, 0x0000004d,
916 0x000000ae, 0x0000002a, 0x000000f5, 0x000000b0,
917 0x000000c8, 0x000000eb, 0x000000bb, 0x0000003c,
918 0x00000083, 0x00000053, 0x00000099, 0x00000061,
919 0x00000017, 0x0000002b, 0x00000004, 0x0000007e,
920 0x000000ba, 0x00000077, 0x000000d6, 0x00000026,
921 0x000000e1, 0x00000069, 0x00000014, 0x00000063,
922 0x00000055, 0x00000021, 0x0000000c, 0x0000007d,
923 }, {
924 0x00005200, 0x00000900, 0x00006a00, 0x0000d500,
925 0x00003000, 0x00003600, 0x0000a500, 0x00003800,
926 0x0000bf00, 0x00004000, 0x0000a300, 0x00009e00,
927 0x00008100, 0x0000f300, 0x0000d700, 0x0000fb00,
928 0x00007c00, 0x0000e300, 0x00003900, 0x00008200,
929 0x00009b00, 0x00002f00, 0x0000ff00, 0x00008700,
930 0x00003400, 0x00008e00, 0x00004300, 0x00004400,
931 0x0000c400, 0x0000de00, 0x0000e900, 0x0000cb00,
932 0x00005400, 0x00007b00, 0x00009400, 0x00003200,
933 0x0000a600, 0x0000c200, 0x00002300, 0x00003d00,
934 0x0000ee00, 0x00004c00, 0x00009500, 0x00000b00,
935 0x00004200, 0x0000fa00, 0x0000c300, 0x00004e00,
936 0x00000800, 0x00002e00, 0x0000a100, 0x00006600,
937 0x00002800, 0x0000d900, 0x00002400, 0x0000b200,
938 0x00007600, 0x00005b00, 0x0000a200, 0x00004900,
939 0x00006d00, 0x00008b00, 0x0000d100, 0x00002500,
940 0x00007200, 0x0000f800, 0x0000f600, 0x00006400,
941 0x00008600, 0x00006800, 0x00009800, 0x00001600,
942 0x0000d400, 0x0000a400, 0x00005c00, 0x0000cc00,
943 0x00005d00, 0x00006500, 0x0000b600, 0x00009200,
944 0x00006c00, 0x00007000, 0x00004800, 0x00005000,
945 0x0000fd00, 0x0000ed00, 0x0000b900, 0x0000da00,
946 0x00005e00, 0x00001500, 0x00004600, 0x00005700,
947 0x0000a700, 0x00008d00, 0x00009d00, 0x00008400,
948 0x00009000, 0x0000d800, 0x0000ab00, 0x00000000,
949 0x00008c00, 0x0000bc00, 0x0000d300, 0x00000a00,
950 0x0000f700, 0x0000e400, 0x00005800, 0x00000500,
951 0x0000b800, 0x0000b300, 0x00004500, 0x00000600,
952 0x0000d000, 0x00002c00, 0x00001e00, 0x00008f00,
953 0x0000ca00, 0x00003f00, 0x00000f00, 0x00000200,
954 0x0000c100, 0x0000af00, 0x0000bd00, 0x00000300,
955 0x00000100, 0x00001300, 0x00008a00, 0x00006b00,
956 0x00003a00, 0x00009100, 0x00001100, 0x00004100,
957 0x00004f00, 0x00006700, 0x0000dc00, 0x0000ea00,
958 0x00009700, 0x0000f200, 0x0000cf00, 0x0000ce00,
959 0x0000f000, 0x0000b400, 0x0000e600, 0x00007300,
960 0x00009600, 0x0000ac00, 0x00007400, 0x00002200,
961 0x0000e700, 0x0000ad00, 0x00003500, 0x00008500,
962 0x0000e200, 0x0000f900, 0x00003700, 0x0000e800,
963 0x00001c00, 0x00007500, 0x0000df00, 0x00006e00,
964 0x00004700, 0x0000f100, 0x00001a00, 0x00007100,
965 0x00001d00, 0x00002900, 0x0000c500, 0x00008900,
966 0x00006f00, 0x0000b700, 0x00006200, 0x00000e00,
967 0x0000aa00, 0x00001800, 0x0000be00, 0x00001b00,
968 0x0000fc00, 0x00005600, 0x00003e00, 0x00004b00,
969 0x0000c600, 0x0000d200, 0x00007900, 0x00002000,
970 0x00009a00, 0x0000db00, 0x0000c000, 0x0000fe00,
971 0x00007800, 0x0000cd00, 0x00005a00, 0x0000f400,
972 0x00001f00, 0x0000dd00, 0x0000a800, 0x00003300,
973 0x00008800, 0x00000700, 0x0000c700, 0x00003100,
974 0x0000b100, 0x00001200, 0x00001000, 0x00005900,
975 0x00002700, 0x00008000, 0x0000ec00, 0x00005f00,
976 0x00006000, 0x00005100, 0x00007f00, 0x0000a900,
977 0x00001900, 0x0000b500, 0x00004a00, 0x00000d00,
978 0x00002d00, 0x0000e500, 0x00007a00, 0x00009f00,
979 0x00009300, 0x0000c900, 0x00009c00, 0x0000ef00,
980 0x0000a000, 0x0000e000, 0x00003b00, 0x00004d00,
981 0x0000ae00, 0x00002a00, 0x0000f500, 0x0000b000,
982 0x0000c800, 0x0000eb00, 0x0000bb00, 0x00003c00,
983 0x00008300, 0x00005300, 0x00009900, 0x00006100,
984 0x00001700, 0x00002b00, 0x00000400, 0x00007e00,
985 0x0000ba00, 0x00007700, 0x0000d600, 0x00002600,
986 0x0000e100, 0x00006900, 0x00001400, 0x00006300,
987 0x00005500, 0x00002100, 0x00000c00, 0x00007d00,
988 }, {
989 0x00520000, 0x00090000, 0x006a0000, 0x00d50000,
990 0x00300000, 0x00360000, 0x00a50000, 0x00380000,
991 0x00bf0000, 0x00400000, 0x00a30000, 0x009e0000,
992 0x00810000, 0x00f30000, 0x00d70000, 0x00fb0000,
993 0x007c0000, 0x00e30000, 0x00390000, 0x00820000,
994 0x009b0000, 0x002f0000, 0x00ff0000, 0x00870000,
995 0x00340000, 0x008e0000, 0x00430000, 0x00440000,
996 0x00c40000, 0x00de0000, 0x00e90000, 0x00cb0000,
997 0x00540000, 0x007b0000, 0x00940000, 0x00320000,
998 0x00a60000, 0x00c20000, 0x00230000, 0x003d0000,
999 0x00ee0000, 0x004c0000, 0x00950000, 0x000b0000,
1000 0x00420000, 0x00fa0000, 0x00c30000, 0x004e0000,
1001 0x00080000, 0x002e0000, 0x00a10000, 0x00660000,
1002 0x00280000, 0x00d90000, 0x00240000, 0x00b20000,
1003 0x00760000, 0x005b0000, 0x00a20000, 0x00490000,
1004 0x006d0000, 0x008b0000, 0x00d10000, 0x00250000,
1005 0x00720000, 0x00f80000, 0x00f60000, 0x00640000,
1006 0x00860000, 0x00680000, 0x00980000, 0x00160000,
1007 0x00d40000, 0x00a40000, 0x005c0000, 0x00cc0000,
1008 0x005d0000, 0x00650000, 0x00b60000, 0x00920000,
1009 0x006c0000, 0x00700000, 0x00480000, 0x00500000,
1010 0x00fd0000, 0x00ed0000, 0x00b90000, 0x00da0000,
1011 0x005e0000, 0x00150000, 0x00460000, 0x00570000,
1012 0x00a70000, 0x008d0000, 0x009d0000, 0x00840000,
1013 0x00900000, 0x00d80000, 0x00ab0000, 0x00000000,
1014 0x008c0000, 0x00bc0000, 0x00d30000, 0x000a0000,
1015 0x00f70000, 0x00e40000, 0x00580000, 0x00050000,
1016 0x00b80000, 0x00b30000, 0x00450000, 0x00060000,
1017 0x00d00000, 0x002c0000, 0x001e0000, 0x008f0000,
1018 0x00ca0000, 0x003f0000, 0x000f0000, 0x00020000,
1019 0x00c10000, 0x00af0000, 0x00bd0000, 0x00030000,
1020 0x00010000, 0x00130000, 0x008a0000, 0x006b0000,
1021 0x003a0000, 0x00910000, 0x00110000, 0x00410000,
1022 0x004f0000, 0x00670000, 0x00dc0000, 0x00ea0000,
1023 0x00970000, 0x00f20000, 0x00cf0000, 0x00ce0000,
1024 0x00f00000, 0x00b40000, 0x00e60000, 0x00730000,
1025 0x00960000, 0x00ac0000, 0x00740000, 0x00220000,
1026 0x00e70000, 0x00ad0000, 0x00350000, 0x00850000,
1027 0x00e20000, 0x00f90000, 0x00370000, 0x00e80000,
1028 0x001c0000, 0x00750000, 0x00df0000, 0x006e0000,
1029 0x00470000, 0x00f10000, 0x001a0000, 0x00710000,
1030 0x001d0000, 0x00290000, 0x00c50000, 0x00890000,
1031 0x006f0000, 0x00b70000, 0x00620000, 0x000e0000,
1032 0x00aa0000, 0x00180000, 0x00be0000, 0x001b0000,
1033 0x00fc0000, 0x00560000, 0x003e0000, 0x004b0000,
1034 0x00c60000, 0x00d20000, 0x00790000, 0x00200000,
1035 0x009a0000, 0x00db0000, 0x00c00000, 0x00fe0000,
1036 0x00780000, 0x00cd0000, 0x005a0000, 0x00f40000,
1037 0x001f0000, 0x00dd0000, 0x00a80000, 0x00330000,
1038 0x00880000, 0x00070000, 0x00c70000, 0x00310000,
1039 0x00b10000, 0x00120000, 0x00100000, 0x00590000,
1040 0x00270000, 0x00800000, 0x00ec0000, 0x005f0000,
1041 0x00600000, 0x00510000, 0x007f0000, 0x00a90000,
1042 0x00190000, 0x00b50000, 0x004a0000, 0x000d0000,
1043 0x002d0000, 0x00e50000, 0x007a0000, 0x009f0000,
1044 0x00930000, 0x00c90000, 0x009c0000, 0x00ef0000,
1045 0x00a00000, 0x00e00000, 0x003b0000, 0x004d0000,
1046 0x00ae0000, 0x002a0000, 0x00f50000, 0x00b00000,
1047 0x00c80000, 0x00eb0000, 0x00bb0000, 0x003c0000,
1048 0x00830000, 0x00530000, 0x00990000, 0x00610000,
1049 0x00170000, 0x002b0000, 0x00040000, 0x007e0000,
1050 0x00ba0000, 0x00770000, 0x00d60000, 0x00260000,
1051 0x00e10000, 0x00690000, 0x00140000, 0x00630000,
1052 0x00550000, 0x00210000, 0x000c0000, 0x007d0000,
1053 }, {
1054 0x52000000, 0x09000000, 0x6a000000, 0xd5000000,
1055 0x30000000, 0x36000000, 0xa5000000, 0x38000000,
1056 0xbf000000, 0x40000000, 0xa3000000, 0x9e000000,
1057 0x81000000, 0xf3000000, 0xd7000000, 0xfb000000,
1058 0x7c000000, 0xe3000000, 0x39000000, 0x82000000,
1059 0x9b000000, 0x2f000000, 0xff000000, 0x87000000,
1060 0x34000000, 0x8e000000, 0x43000000, 0x44000000,
1061 0xc4000000, 0xde000000, 0xe9000000, 0xcb000000,
1062 0x54000000, 0x7b000000, 0x94000000, 0x32000000,
1063 0xa6000000, 0xc2000000, 0x23000000, 0x3d000000,
1064 0xee000000, 0x4c000000, 0x95000000, 0x0b000000,
1065 0x42000000, 0xfa000000, 0xc3000000, 0x4e000000,
1066 0x08000000, 0x2e000000, 0xa1000000, 0x66000000,
1067 0x28000000, 0xd9000000, 0x24000000, 0xb2000000,
1068 0x76000000, 0x5b000000, 0xa2000000, 0x49000000,
1069 0x6d000000, 0x8b000000, 0xd1000000, 0x25000000,
1070 0x72000000, 0xf8000000, 0xf6000000, 0x64000000,
1071 0x86000000, 0x68000000, 0x98000000, 0x16000000,
1072 0xd4000000, 0xa4000000, 0x5c000000, 0xcc000000,
1073 0x5d000000, 0x65000000, 0xb6000000, 0x92000000,
1074 0x6c000000, 0x70000000, 0x48000000, 0x50000000,
1075 0xfd000000, 0xed000000, 0xb9000000, 0xda000000,
1076 0x5e000000, 0x15000000, 0x46000000, 0x57000000,
1077 0xa7000000, 0x8d000000, 0x9d000000, 0x84000000,
1078 0x90000000, 0xd8000000, 0xab000000, 0x00000000,
1079 0x8c000000, 0xbc000000, 0xd3000000, 0x0a000000,
1080 0xf7000000, 0xe4000000, 0x58000000, 0x05000000,
1081 0xb8000000, 0xb3000000, 0x45000000, 0x06000000,
1082 0xd0000000, 0x2c000000, 0x1e000000, 0x8f000000,
1083 0xca000000, 0x3f000000, 0x0f000000, 0x02000000,
1084 0xc1000000, 0xaf000000, 0xbd000000, 0x03000000,
1085 0x01000000, 0x13000000, 0x8a000000, 0x6b000000,
1086 0x3a000000, 0x91000000, 0x11000000, 0x41000000,
1087 0x4f000000, 0x67000000, 0xdc000000, 0xea000000,
1088 0x97000000, 0xf2000000, 0xcf000000, 0xce000000,
1089 0xf0000000, 0xb4000000, 0xe6000000, 0x73000000,
1090 0x96000000, 0xac000000, 0x74000000, 0x22000000,
1091 0xe7000000, 0xad000000, 0x35000000, 0x85000000,
1092 0xe2000000, 0xf9000000, 0x37000000, 0xe8000000,
1093 0x1c000000, 0x75000000, 0xdf000000, 0x6e000000,
1094 0x47000000, 0xf1000000, 0x1a000000, 0x71000000,
1095 0x1d000000, 0x29000000, 0xc5000000, 0x89000000,
1096 0x6f000000, 0xb7000000, 0x62000000, 0x0e000000,
1097 0xaa000000, 0x18000000, 0xbe000000, 0x1b000000,
1098 0xfc000000, 0x56000000, 0x3e000000, 0x4b000000,
1099 0xc6000000, 0xd2000000, 0x79000000, 0x20000000,
1100 0x9a000000, 0xdb000000, 0xc0000000, 0xfe000000,
1101 0x78000000, 0xcd000000, 0x5a000000, 0xf4000000,
1102 0x1f000000, 0xdd000000, 0xa8000000, 0x33000000,
1103 0x88000000, 0x07000000, 0xc7000000, 0x31000000,
1104 0xb1000000, 0x12000000, 0x10000000, 0x59000000,
1105 0x27000000, 0x80000000, 0xec000000, 0x5f000000,
1106 0x60000000, 0x51000000, 0x7f000000, 0xa9000000,
1107 0x19000000, 0xb5000000, 0x4a000000, 0x0d000000,
1108 0x2d000000, 0xe5000000, 0x7a000000, 0x9f000000,
1109 0x93000000, 0xc9000000, 0x9c000000, 0xef000000,
1110 0xa0000000, 0xe0000000, 0x3b000000, 0x4d000000,
1111 0xae000000, 0x2a000000, 0xf5000000, 0xb0000000,
1112 0xc8000000, 0xeb000000, 0xbb000000, 0x3c000000,
1113 0x83000000, 0x53000000, 0x99000000, 0x61000000,
1114 0x17000000, 0x2b000000, 0x04000000, 0x7e000000,
1115 0xba000000, 0x77000000, 0xd6000000, 0x26000000,
1116 0xe1000000, 0x69000000, 0x14000000, 0x63000000,
1117 0x55000000, 0x21000000, 0x0c000000, 0x7d000000,
157 } 1118 }
158} 1119};
1120
1121EXPORT_SYMBOL_GPL(crypto_ft_tab);
1122EXPORT_SYMBOL_GPL(crypto_fl_tab);
1123EXPORT_SYMBOL_GPL(crypto_it_tab);
1124EXPORT_SYMBOL_GPL(crypto_il_tab);
159 1125
160/* initialise the key schedule from the user supplied key */ 1126/* initialise the key schedule from the user supplied key */
161 1127
@@ -491,7 +1457,6 @@ static struct crypto_alg aes_alg = {
491 1457
492static int __init aes_init(void) 1458static int __init aes_init(void)
493{ 1459{
494 gen_tabs();
495 return crypto_register_alg(&aes_alg); 1460 return crypto_register_alg(&aes_alg);
496} 1461}
497 1462
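The removed gen_tabs() loop above computed crypto_il_tab[k][i] as rol32(isb_tab[i], 8 * k); with the tables now precomputed and static, that relationship can be spot-checked with the minimal user-space sketch below (not kernel code; the four isb[] bytes are taken from the first entries of crypto_il_tab[0] shown earlier and are assumed to be the start of the standard AES inverse S-box).

/*
 * Reproduce the first column of each crypto_il_tab sub-table: table k
 * holds the inverse S-box byte rotated left by 8*k bits, which is exactly
 * what the dropped gen_tabs() loop used to compute at module load.
 */
#include <stdint.h>
#include <stdio.h>

static uint32_t rol32(uint32_t x, unsigned int s)
{
	return s ? (x << s) | (x >> (32 - s)) : x;
}

int main(void)
{
	static const uint8_t isb[4] = { 0x52, 0x09, 0x6a, 0xd5 };
	unsigned int i, k;

	for (k = 0; k < 4; k++)
		for (i = 0; i < 4; i++)
			/* prints 0x00000052, 0x00005200, 0x00520000, 0x52000000, ... */
			printf("il[%u][%u] = 0x%08x\n", k, i,
			       rol32((uint32_t)isb[i], 8 * k));
	return 0;
}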
diff --git a/crypto/ahash.c b/crypto/ahash.c
index 27128f2c687a..ba5292d69ebd 100644
--- a/crypto/ahash.c
+++ b/crypto/ahash.c
@@ -112,6 +112,22 @@ int crypto_hash_walk_first(struct ahash_request *req,
112} 112}
113EXPORT_SYMBOL_GPL(crypto_hash_walk_first); 113EXPORT_SYMBOL_GPL(crypto_hash_walk_first);
114 114
115int crypto_hash_walk_first_compat(struct hash_desc *hdesc,
116 struct crypto_hash_walk *walk,
117 struct scatterlist *sg, unsigned int len)
118{
119 walk->total = len;
120
121 if (!walk->total)
122 return 0;
123
124 walk->alignmask = crypto_hash_alignmask(hdesc->tfm);
125 walk->sg = sg;
126 walk->flags = hdesc->flags;
127
128 return hash_walk_new_entry(walk);
129}
130
115static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key, 131static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
116 unsigned int keylen) 132 unsigned int keylen)
117{ 133{
@@ -146,6 +162,26 @@ static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
146 return ahash->setkey(tfm, key, keylen); 162 return ahash->setkey(tfm, key, keylen);
147} 163}
148 164
165static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
166 unsigned int keylen)
167{
168 return -ENOSYS;
169}
170
171int crypto_ahash_import(struct ahash_request *req, const u8 *in)
172{
173 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
174 struct ahash_alg *alg = crypto_ahash_alg(tfm);
175
176 memcpy(ahash_request_ctx(req), in, crypto_ahash_reqsize(tfm));
177
178 if (alg->reinit)
179 alg->reinit(req);
180
181 return 0;
182}
183EXPORT_SYMBOL_GPL(crypto_ahash_import);
184
149static unsigned int crypto_ahash_ctxsize(struct crypto_alg *alg, u32 type, 185static unsigned int crypto_ahash_ctxsize(struct crypto_alg *alg, u32 type,
150 u32 mask) 186 u32 mask)
151{ 187{
@@ -164,7 +200,7 @@ static int crypto_init_ahash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
164 crt->update = alg->update; 200 crt->update = alg->update;
165 crt->final = alg->final; 201 crt->final = alg->final;
166 crt->digest = alg->digest; 202 crt->digest = alg->digest;
167 crt->setkey = ahash_setkey; 203 crt->setkey = alg->setkey ? ahash_setkey : ahash_nosetkey;
168 crt->digestsize = alg->digestsize; 204 crt->digestsize = alg->digestsize;
169 205
170 return 0; 206 return 0;
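The ahash hunk above makes setkey optional by installing ahash_nosetkey whenever an algorithm leaves its setkey hook unset. A stripped-down sketch of that optional-callback pattern follows; the hash_ops struct and function names here are illustrative only and not part of the kernel API.

#include <errno.h>

struct hash_ops {
	int (*setkey)(void *ctx, const unsigned char *key, unsigned int len);
};

/* stub installed when an algorithm provides no setkey */
static int nosetkey(void *ctx, const unsigned char *key, unsigned int len)
{
	return -ENOSYS;
}

static void install_ops(struct hash_ops *crt, const struct hash_ops *alg)
{
	/* mirrors: crt->setkey = alg->setkey ? ahash_setkey : ahash_nosetkey */
	crt->setkey = alg->setkey ? alg->setkey : nosetkey;
}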
diff --git a/crypto/ansi_cprng.c b/crypto/ansi_cprng.c
index 72db0fd763cc..0fac8ffc2fb7 100644
--- a/crypto/ansi_cprng.c
+++ b/crypto/ansi_cprng.c
@@ -161,7 +161,7 @@ static int _get_more_prng_bytes(struct prng_context *ctx)
161 /* 161 /*
162 * Now update our DT value 162 * Now update our DT value
163 */ 163 */
164 for (i = 0; i < DEFAULT_BLK_SZ; i++) { 164 for (i = DEFAULT_BLK_SZ - 1; i >= 0; i--) {
165 ctx->DT[i] += 1; 165 ctx->DT[i] += 1;
166 if (ctx->DT[i] != 0) 166 if (ctx->DT[i] != 0)
167 break; 167 break;
@@ -223,9 +223,10 @@ remainder:
223 } 223 }
224 224
225 /* 225 /*
226 * Copy up to the next whole block size 226 * Copy any data less than an entire block
227 */ 227 */
228 if (byte_count < DEFAULT_BLK_SZ) { 228 if (byte_count < DEFAULT_BLK_SZ) {
229empty_rbuf:
229 for (; ctx->rand_data_valid < DEFAULT_BLK_SZ; 230 for (; ctx->rand_data_valid < DEFAULT_BLK_SZ;
230 ctx->rand_data_valid++) { 231 ctx->rand_data_valid++) {
231 *ptr = ctx->rand_data[ctx->rand_data_valid]; 232 *ptr = ctx->rand_data[ctx->rand_data_valid];
@@ -240,18 +241,22 @@ remainder:
240 * Now copy whole blocks 241 * Now copy whole blocks
241 */ 242 */
242 for (; byte_count >= DEFAULT_BLK_SZ; byte_count -= DEFAULT_BLK_SZ) { 243 for (; byte_count >= DEFAULT_BLK_SZ; byte_count -= DEFAULT_BLK_SZ) {
243 if (_get_more_prng_bytes(ctx) < 0) { 244 if (ctx->rand_data_valid == DEFAULT_BLK_SZ) {
244 memset(buf, 0, nbytes); 245 if (_get_more_prng_bytes(ctx) < 0) {
245 err = -EINVAL; 246 memset(buf, 0, nbytes);
246 goto done; 247 err = -EINVAL;
248 goto done;
249 }
247 } 250 }
251 if (ctx->rand_data_valid > 0)
252 goto empty_rbuf;
248 memcpy(ptr, ctx->rand_data, DEFAULT_BLK_SZ); 253 memcpy(ptr, ctx->rand_data, DEFAULT_BLK_SZ);
249 ctx->rand_data_valid += DEFAULT_BLK_SZ; 254 ctx->rand_data_valid += DEFAULT_BLK_SZ;
250 ptr += DEFAULT_BLK_SZ; 255 ptr += DEFAULT_BLK_SZ;
251 } 256 }
252 257
253 /* 258 /*
254 * Now copy any extra partial data 259 * Now go back and get any remaining partial block
255 */ 260 */
256 if (byte_count) 261 if (byte_count)
257 goto remainder; 262 goto remainder;
@@ -349,15 +354,25 @@ static int cprng_get_random(struct crypto_rng *tfm, u8 *rdata,
349 return get_prng_bytes(rdata, dlen, prng); 354 return get_prng_bytes(rdata, dlen, prng);
350} 355}
351 356
357/*
 358 * This is the cprng_registered reset method; the seed value is
 359 * interpreted as the tuple { V KEY DT }.
 360 * V and KEY are required during reset, and DT is optional, detected
 361 * as being present by testing the length of the seed.
362 */
352static int cprng_reset(struct crypto_rng *tfm, u8 *seed, unsigned int slen) 363static int cprng_reset(struct crypto_rng *tfm, u8 *seed, unsigned int slen)
353{ 364{
354 struct prng_context *prng = crypto_rng_ctx(tfm); 365 struct prng_context *prng = crypto_rng_ctx(tfm);
355 u8 *key = seed + DEFAULT_PRNG_KSZ; 366 u8 *key = seed + DEFAULT_BLK_SZ;
367 u8 *dt = NULL;
356 368
357 if (slen < DEFAULT_PRNG_KSZ + DEFAULT_BLK_SZ) 369 if (slen < DEFAULT_PRNG_KSZ + DEFAULT_BLK_SZ)
358 return -EINVAL; 370 return -EINVAL;
359 371
360 reset_prng_context(prng, key, DEFAULT_PRNG_KSZ, seed, NULL); 372 if (slen >= (2 * DEFAULT_BLK_SZ + DEFAULT_PRNG_KSZ))
373 dt = key + DEFAULT_PRNG_KSZ;
374
375 reset_prng_context(prng, key, DEFAULT_PRNG_KSZ, seed, dt);
361 376
362 if (prng->flags & PRNG_NEED_RESET) 377 if (prng->flags & PRNG_NEED_RESET)
363 return -EINVAL; 378 return -EINVAL;
@@ -379,7 +394,7 @@ static struct crypto_alg rng_alg = {
379 .rng = { 394 .rng = {
380 .rng_make_random = cprng_get_random, 395 .rng_make_random = cprng_get_random,
381 .rng_reset = cprng_reset, 396 .rng_reset = cprng_reset,
382 .seedsize = DEFAULT_PRNG_KSZ + DEFAULT_BLK_SZ, 397 .seedsize = DEFAULT_PRNG_KSZ + 2*DEFAULT_BLK_SZ,
383 } 398 }
384 } 399 }
385}; 400};
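The cprng_reset() comment above describes the seed as the tuple { V KEY DT }. Assuming DEFAULT_BLK_SZ and DEFAULT_PRNG_KSZ are both 16 bytes in this driver (AES-128 block and key sizes), a caller's seed buffer is laid out roughly as the illustrative struct below; handing in only the first 32 bytes leaves DT as NULL, matching the slen test added in the hunk.

/*
 * Illustrative layout only -- the driver itself just indexes into a flat
 * u8 buffer of slen bytes, as the hunk above shows.
 */
struct cprng_seed_layout {
	unsigned char v[16];	/* initial V value, taken from the start   */
	unsigned char key[16];	/* AES key, found at seed + DEFAULT_BLK_SZ */
	unsigned char dt[16];	/* optional DT; only used when slen covers */
				/* it, otherwise dt stays NULL             */
};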
diff --git a/crypto/api.c b/crypto/api.c
index 0444d242e985..9975a7bd246c 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -300,8 +300,8 @@ static void crypto_exit_ops(struct crypto_tfm *tfm)
300 const struct crypto_type *type = tfm->__crt_alg->cra_type; 300 const struct crypto_type *type = tfm->__crt_alg->cra_type;
301 301
302 if (type) { 302 if (type) {
303 if (type->exit) 303 if (tfm->exit)
304 type->exit(tfm); 304 tfm->exit(tfm);
305 return; 305 return;
306 } 306 }
307 307
@@ -379,17 +379,16 @@ struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
379 if (err) 379 if (err)
380 goto out_free_tfm; 380 goto out_free_tfm;
381 381
382 if (alg->cra_init && (err = alg->cra_init(tfm))) { 382 if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
383 if (err == -EAGAIN)
384 crypto_shoot_alg(alg);
385 goto cra_init_failed; 383 goto cra_init_failed;
386 }
387 384
388 goto out; 385 goto out;
389 386
390cra_init_failed: 387cra_init_failed:
391 crypto_exit_ops(tfm); 388 crypto_exit_ops(tfm);
392out_free_tfm: 389out_free_tfm:
390 if (err == -EAGAIN)
391 crypto_shoot_alg(alg);
393 kfree(tfm); 392 kfree(tfm);
394out_err: 393out_err:
395 tfm = ERR_PTR(err); 394 tfm = ERR_PTR(err);
@@ -404,6 +403,9 @@ EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);
404 * @type: Type of algorithm 403 * @type: Type of algorithm
405 * @mask: Mask for type comparison 404 * @mask: Mask for type comparison
406 * 405 *
406 * This function should not be used by new algorithm types.
 407 * Please use crypto_alloc_tfm instead.
408 *
407 * crypto_alloc_base() will first attempt to locate an already loaded 409 * crypto_alloc_base() will first attempt to locate an already loaded
408 * algorithm. If that fails and the kernel supports dynamically loadable 410 * algorithm. If that fails and the kernel supports dynamically loadable
409 * modules, it will then attempt to load a module of the same name or 411 * modules, it will then attempt to load a module of the same name or
@@ -450,6 +452,111 @@ err:
450 return ERR_PTR(err); 452 return ERR_PTR(err);
451} 453}
452EXPORT_SYMBOL_GPL(crypto_alloc_base); 454EXPORT_SYMBOL_GPL(crypto_alloc_base);
455
456struct crypto_tfm *crypto_create_tfm(struct crypto_alg *alg,
457 const struct crypto_type *frontend)
458{
459 char *mem;
460 struct crypto_tfm *tfm = NULL;
461 unsigned int tfmsize;
462 unsigned int total;
463 int err = -ENOMEM;
464
465 tfmsize = frontend->tfmsize;
466 total = tfmsize + sizeof(*tfm) + frontend->extsize(alg, frontend);
467
468 mem = kzalloc(total, GFP_KERNEL);
469 if (mem == NULL)
470 goto out_err;
471
472 tfm = (struct crypto_tfm *)(mem + tfmsize);
473 tfm->__crt_alg = alg;
474
475 err = frontend->init_tfm(tfm, frontend);
476 if (err)
477 goto out_free_tfm;
478
479 if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
480 goto cra_init_failed;
481
482 goto out;
483
484cra_init_failed:
485 crypto_exit_ops(tfm);
486out_free_tfm:
487 if (err == -EAGAIN)
488 crypto_shoot_alg(alg);
489 kfree(mem);
490out_err:
491 tfm = ERR_PTR(err);
492out:
493 return tfm;
494}
495EXPORT_SYMBOL_GPL(crypto_create_tfm);
496
497/*
498 * crypto_alloc_tfm - Locate algorithm and allocate transform
499 * @alg_name: Name of algorithm
500 * @frontend: Frontend algorithm type
501 * @type: Type of algorithm
502 * @mask: Mask for type comparison
503 *
504 * crypto_alloc_tfm() will first attempt to locate an already loaded
505 * algorithm. If that fails and the kernel supports dynamically loadable
506 * modules, it will then attempt to load a module of the same name or
507 * alias. If that fails it will send a query to any loaded crypto manager
508 * to construct an algorithm on the fly. A refcount is grabbed on the
509 * algorithm which is then associated with the new transform.
510 *
511 * The returned transform is of a non-determinate type. Most people
512 * should use one of the more specific allocation functions such as
513 * crypto_alloc_blkcipher.
514 *
515 * In case of error the return value is an error pointer.
516 */
517struct crypto_tfm *crypto_alloc_tfm(const char *alg_name,
518 const struct crypto_type *frontend,
519 u32 type, u32 mask)
520{
521 struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);
522 struct crypto_tfm *tfm;
523 int err;
524
525 type &= frontend->maskclear;
526 mask &= frontend->maskclear;
527 type |= frontend->type;
528 mask |= frontend->maskset;
529
530 lookup = frontend->lookup ?: crypto_alg_mod_lookup;
531
532 for (;;) {
533 struct crypto_alg *alg;
534
535 alg = lookup(alg_name, type, mask);
536 if (IS_ERR(alg)) {
537 err = PTR_ERR(alg);
538 goto err;
539 }
540
541 tfm = crypto_create_tfm(alg, frontend);
542 if (!IS_ERR(tfm))
543 return tfm;
544
545 crypto_mod_put(alg);
546 err = PTR_ERR(tfm);
547
548err:
549 if (err != -EAGAIN)
550 break;
551 if (signal_pending(current)) {
552 err = -EINTR;
553 break;
554 }
555 }
556
557 return ERR_PTR(err);
558}
559EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
453 560
454/* 561/*
455 * crypto_free_tfm - Free crypto transform 562 * crypto_free_tfm - Free crypto transform
@@ -469,7 +576,7 @@ void crypto_free_tfm(struct crypto_tfm *tfm)
469 alg = tfm->__crt_alg; 576 alg = tfm->__crt_alg;
470 size = sizeof(*tfm) + alg->cra_ctxsize; 577 size = sizeof(*tfm) + alg->cra_ctxsize;
471 578
472 if (alg->cra_exit) 579 if (!tfm->exit && alg->cra_exit)
473 alg->cra_exit(tfm); 580 alg->cra_exit(tfm);
474 crypto_exit_ops(tfm); 581 crypto_exit_ops(tfm);
475 crypto_mod_put(alg); 582 crypto_mod_put(alg);
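crypto_alloc_tfm() above is meant to be wrapped by type-specific frontends rather than called directly by end users. A hypothetical wrapper might look like the sketch below; crypto_foo_type, struct crypto_foo and __crypto_foo_cast() are made-up names, and the real frontend wrappers added elsewhere in this series may differ in detail.

#include <linux/kernel.h>
#include <linux/crypto.h>
#include <crypto/algapi.h>

/* the frontend's struct crypto_type instance (hypothetical) */
extern const struct crypto_type crypto_foo_type;

/*
 * Made-up frontend handle.  base must be the first member so that an
 * IS_ERR() pointer returned by crypto_alloc_tfm() passes straight
 * through the container cast below.
 */
struct crypto_foo {
	struct crypto_tfm base;
};

static inline struct crypto_foo *__crypto_foo_cast(struct crypto_tfm *tfm)
{
	return container_of(tfm, struct crypto_foo, base);
}

static struct crypto_foo *crypto_alloc_foo(const char *alg_name,
					   u32 type, u32 mask)
{
	return __crypto_foo_cast(crypto_alloc_tfm(alg_name, &crypto_foo_type,
						  type, mask));
}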
diff --git a/crypto/authenc.c b/crypto/authenc.c
index fd9f06c63d76..40b6e9ec9e3a 100644
--- a/crypto/authenc.c
+++ b/crypto/authenc.c
@@ -11,6 +11,7 @@
11 */ 11 */
12 12
13#include <crypto/aead.h> 13#include <crypto/aead.h>
14#include <crypto/internal/hash.h>
14#include <crypto/internal/skcipher.h> 15#include <crypto/internal/skcipher.h>
15#include <crypto/authenc.h> 16#include <crypto/authenc.h>
16#include <crypto/scatterwalk.h> 17#include <crypto/scatterwalk.h>
@@ -431,6 +432,8 @@ static struct crypto_instance *crypto_authenc_alloc(struct rtattr **tb)
431 inst->alg.cra_aead.ivsize = enc->cra_ablkcipher.ivsize; 432 inst->alg.cra_aead.ivsize = enc->cra_ablkcipher.ivsize;
432 inst->alg.cra_aead.maxauthsize = auth->cra_type == &crypto_hash_type ? 433 inst->alg.cra_aead.maxauthsize = auth->cra_type == &crypto_hash_type ?
433 auth->cra_hash.digestsize : 434 auth->cra_hash.digestsize :
435 auth->cra_type ?
436 __crypto_shash_alg(auth)->digestsize :
434 auth->cra_digest.dia_digestsize; 437 auth->cra_digest.dia_digestsize;
435 438
436 inst->alg.cra_ctxsize = sizeof(struct crypto_authenc_ctx); 439 inst->alg.cra_ctxsize = sizeof(struct crypto_authenc_ctx);
diff --git a/crypto/camellia.c b/crypto/camellia.c
index 493fee7e0a8b..964635d163f4 100644
--- a/crypto/camellia.c
+++ b/crypto/camellia.c
@@ -35,6 +35,8 @@
35#include <linux/init.h> 35#include <linux/init.h>
36#include <linux/kernel.h> 36#include <linux/kernel.h>
37#include <linux/module.h> 37#include <linux/module.h>
38#include <linux/bitops.h>
39#include <asm/unaligned.h>
38 40
39static const u32 camellia_sp1110[256] = { 41static const u32 camellia_sp1110[256] = {
40 0x70707000,0x82828200,0x2c2c2c00,0xececec00, 42 0x70707000,0x82828200,0x2c2c2c00,0xececec00,
@@ -335,20 +337,6 @@ static const u32 camellia_sp4404[256] = {
335/* 337/*
336 * macros 338 * macros
337 */ 339 */
338#define GETU32(v, pt) \
339 do { \
340 /* latest breed of gcc is clever enough to use move */ \
341 memcpy(&(v), (pt), 4); \
342 (v) = be32_to_cpu(v); \
343 } while(0)
344
345/* rotation right shift 1byte */
346#define ROR8(x) (((x) >> 8) + ((x) << 24))
347/* rotation left shift 1bit */
348#define ROL1(x) (((x) << 1) + ((x) >> 31))
349/* rotation left shift 1byte */
350#define ROL8(x) (((x) << 8) + ((x) >> 24))
351
352#define ROLDQ(ll, lr, rl, rr, w0, w1, bits) \ 340#define ROLDQ(ll, lr, rl, rr, w0, w1, bits) \
353 do { \ 341 do { \
354 w0 = ll; \ 342 w0 = ll; \
@@ -383,7 +371,7 @@ static const u32 camellia_sp4404[256] = {
383 ^ camellia_sp3033[(u8)(il >> 8)] \ 371 ^ camellia_sp3033[(u8)(il >> 8)] \
384 ^ camellia_sp4404[(u8)(il )]; \ 372 ^ camellia_sp4404[(u8)(il )]; \
385 yl ^= yr; \ 373 yl ^= yr; \
386 yr = ROR8(yr); \ 374 yr = ror32(yr, 8); \
387 yr ^= yl; \ 375 yr ^= yl; \
388 } while(0) 376 } while(0)
389 377
@@ -405,7 +393,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
405 subL[7] ^= subL[1]; subR[7] ^= subR[1]; 393 subL[7] ^= subL[1]; subR[7] ^= subR[1];
406 subL[1] ^= subR[1] & ~subR[9]; 394 subL[1] ^= subR[1] & ~subR[9];
407 dw = subL[1] & subL[9], 395 dw = subL[1] & subL[9],
408 subR[1] ^= ROL1(dw); /* modified for FLinv(kl2) */ 396 subR[1] ^= rol32(dw, 1); /* modified for FLinv(kl2) */
409 /* round 8 */ 397 /* round 8 */
410 subL[11] ^= subL[1]; subR[11] ^= subR[1]; 398 subL[11] ^= subL[1]; subR[11] ^= subR[1];
411 /* round 10 */ 399 /* round 10 */
@@ -414,7 +402,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
414 subL[15] ^= subL[1]; subR[15] ^= subR[1]; 402 subL[15] ^= subL[1]; subR[15] ^= subR[1];
415 subL[1] ^= subR[1] & ~subR[17]; 403 subL[1] ^= subR[1] & ~subR[17];
416 dw = subL[1] & subL[17], 404 dw = subL[1] & subL[17],
417 subR[1] ^= ROL1(dw); /* modified for FLinv(kl4) */ 405 subR[1] ^= rol32(dw, 1); /* modified for FLinv(kl4) */
418 /* round 14 */ 406 /* round 14 */
419 subL[19] ^= subL[1]; subR[19] ^= subR[1]; 407 subL[19] ^= subL[1]; subR[19] ^= subR[1];
420 /* round 16 */ 408 /* round 16 */
@@ -430,7 +418,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
430 } else { 418 } else {
431 subL[1] ^= subR[1] & ~subR[25]; 419 subL[1] ^= subR[1] & ~subR[25];
432 dw = subL[1] & subL[25], 420 dw = subL[1] & subL[25],
433 subR[1] ^= ROL1(dw); /* modified for FLinv(kl6) */ 421 subR[1] ^= rol32(dw, 1); /* modified for FLinv(kl6) */
434 /* round 20 */ 422 /* round 20 */
435 subL[27] ^= subL[1]; subR[27] ^= subR[1]; 423 subL[27] ^= subL[1]; subR[27] ^= subR[1];
436 /* round 22 */ 424 /* round 22 */
@@ -450,7 +438,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
450 subL[26] ^= kw4l; subR[26] ^= kw4r; 438 subL[26] ^= kw4l; subR[26] ^= kw4r;
451 kw4l ^= kw4r & ~subR[24]; 439 kw4l ^= kw4r & ~subR[24];
452 dw = kw4l & subL[24], 440 dw = kw4l & subL[24],
453 kw4r ^= ROL1(dw); /* modified for FL(kl5) */ 441 kw4r ^= rol32(dw, 1); /* modified for FL(kl5) */
454 } 442 }
455 /* round 17 */ 443 /* round 17 */
456 subL[22] ^= kw4l; subR[22] ^= kw4r; 444 subL[22] ^= kw4l; subR[22] ^= kw4r;
@@ -460,7 +448,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
460 subL[18] ^= kw4l; subR[18] ^= kw4r; 448 subL[18] ^= kw4l; subR[18] ^= kw4r;
461 kw4l ^= kw4r & ~subR[16]; 449 kw4l ^= kw4r & ~subR[16];
462 dw = kw4l & subL[16], 450 dw = kw4l & subL[16],
463 kw4r ^= ROL1(dw); /* modified for FL(kl3) */ 451 kw4r ^= rol32(dw, 1); /* modified for FL(kl3) */
464 /* round 11 */ 452 /* round 11 */
465 subL[14] ^= kw4l; subR[14] ^= kw4r; 453 subL[14] ^= kw4l; subR[14] ^= kw4r;
466 /* round 9 */ 454 /* round 9 */
@@ -469,7 +457,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
469 subL[10] ^= kw4l; subR[10] ^= kw4r; 457 subL[10] ^= kw4l; subR[10] ^= kw4r;
470 kw4l ^= kw4r & ~subR[8]; 458 kw4l ^= kw4r & ~subR[8];
471 dw = kw4l & subL[8], 459 dw = kw4l & subL[8],
472 kw4r ^= ROL1(dw); /* modified for FL(kl1) */ 460 kw4r ^= rol32(dw, 1); /* modified for FL(kl1) */
473 /* round 5 */ 461 /* round 5 */
474 subL[6] ^= kw4l; subR[6] ^= kw4r; 462 subL[6] ^= kw4l; subR[6] ^= kw4r;
475 /* round 3 */ 463 /* round 3 */
@@ -494,7 +482,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
494 SUBKEY_R(6) = subR[5] ^ subR[7]; 482 SUBKEY_R(6) = subR[5] ^ subR[7];
495 tl = subL[10] ^ (subR[10] & ~subR[8]); 483 tl = subL[10] ^ (subR[10] & ~subR[8]);
496 dw = tl & subL[8], /* FL(kl1) */ 484 dw = tl & subL[8], /* FL(kl1) */
497 tr = subR[10] ^ ROL1(dw); 485 tr = subR[10] ^ rol32(dw, 1);
498 SUBKEY_L(7) = subL[6] ^ tl; /* round 6 */ 486 SUBKEY_L(7) = subL[6] ^ tl; /* round 6 */
499 SUBKEY_R(7) = subR[6] ^ tr; 487 SUBKEY_R(7) = subR[6] ^ tr;
500 SUBKEY_L(8) = subL[8]; /* FL(kl1) */ 488 SUBKEY_L(8) = subL[8]; /* FL(kl1) */
@@ -503,7 +491,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
503 SUBKEY_R(9) = subR[9]; 491 SUBKEY_R(9) = subR[9];
504 tl = subL[7] ^ (subR[7] & ~subR[9]); 492 tl = subL[7] ^ (subR[7] & ~subR[9]);
505 dw = tl & subL[9], /* FLinv(kl2) */ 493 dw = tl & subL[9], /* FLinv(kl2) */
506 tr = subR[7] ^ ROL1(dw); 494 tr = subR[7] ^ rol32(dw, 1);
507 SUBKEY_L(10) = tl ^ subL[11]; /* round 7 */ 495 SUBKEY_L(10) = tl ^ subL[11]; /* round 7 */
508 SUBKEY_R(10) = tr ^ subR[11]; 496 SUBKEY_R(10) = tr ^ subR[11];
509 SUBKEY_L(11) = subL[10] ^ subL[12]; /* round 8 */ 497 SUBKEY_L(11) = subL[10] ^ subL[12]; /* round 8 */
@@ -516,7 +504,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
516 SUBKEY_R(14) = subR[13] ^ subR[15]; 504 SUBKEY_R(14) = subR[13] ^ subR[15];
517 tl = subL[18] ^ (subR[18] & ~subR[16]); 505 tl = subL[18] ^ (subR[18] & ~subR[16]);
518 dw = tl & subL[16], /* FL(kl3) */ 506 dw = tl & subL[16], /* FL(kl3) */
519 tr = subR[18] ^ ROL1(dw); 507 tr = subR[18] ^ rol32(dw, 1);
520 SUBKEY_L(15) = subL[14] ^ tl; /* round 12 */ 508 SUBKEY_L(15) = subL[14] ^ tl; /* round 12 */
521 SUBKEY_R(15) = subR[14] ^ tr; 509 SUBKEY_R(15) = subR[14] ^ tr;
522 SUBKEY_L(16) = subL[16]; /* FL(kl3) */ 510 SUBKEY_L(16) = subL[16]; /* FL(kl3) */
@@ -525,7 +513,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
525 SUBKEY_R(17) = subR[17]; 513 SUBKEY_R(17) = subR[17];
526 tl = subL[15] ^ (subR[15] & ~subR[17]); 514 tl = subL[15] ^ (subR[15] & ~subR[17]);
527 dw = tl & subL[17], /* FLinv(kl4) */ 515 dw = tl & subL[17], /* FLinv(kl4) */
528 tr = subR[15] ^ ROL1(dw); 516 tr = subR[15] ^ rol32(dw, 1);
529 SUBKEY_L(18) = tl ^ subL[19]; /* round 13 */ 517 SUBKEY_L(18) = tl ^ subL[19]; /* round 13 */
530 SUBKEY_R(18) = tr ^ subR[19]; 518 SUBKEY_R(18) = tr ^ subR[19];
531 SUBKEY_L(19) = subL[18] ^ subL[20]; /* round 14 */ 519 SUBKEY_L(19) = subL[18] ^ subL[20]; /* round 14 */
@@ -544,7 +532,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
544 } else { 532 } else {
545 tl = subL[26] ^ (subR[26] & ~subR[24]); 533 tl = subL[26] ^ (subR[26] & ~subR[24]);
546 dw = tl & subL[24], /* FL(kl5) */ 534 dw = tl & subL[24], /* FL(kl5) */
547 tr = subR[26] ^ ROL1(dw); 535 tr = subR[26] ^ rol32(dw, 1);
548 SUBKEY_L(23) = subL[22] ^ tl; /* round 18 */ 536 SUBKEY_L(23) = subL[22] ^ tl; /* round 18 */
549 SUBKEY_R(23) = subR[22] ^ tr; 537 SUBKEY_R(23) = subR[22] ^ tr;
550 SUBKEY_L(24) = subL[24]; /* FL(kl5) */ 538 SUBKEY_L(24) = subL[24]; /* FL(kl5) */
@@ -553,7 +541,7 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
553 SUBKEY_R(25) = subR[25]; 541 SUBKEY_R(25) = subR[25];
554 tl = subL[23] ^ (subR[23] & ~subR[25]); 542 tl = subL[23] ^ (subR[23] & ~subR[25]);
555 dw = tl & subL[25], /* FLinv(kl6) */ 543 dw = tl & subL[25], /* FLinv(kl6) */
556 tr = subR[23] ^ ROL1(dw); 544 tr = subR[23] ^ rol32(dw, 1);
557 SUBKEY_L(26) = tl ^ subL[27]; /* round 19 */ 545 SUBKEY_L(26) = tl ^ subL[27]; /* round 19 */
558 SUBKEY_R(26) = tr ^ subR[27]; 546 SUBKEY_R(26) = tr ^ subR[27];
559 SUBKEY_L(27) = subL[26] ^ subL[28]; /* round 20 */ 547 SUBKEY_L(27) = subL[26] ^ subL[28]; /* round 20 */
@@ -573,17 +561,17 @@ static void camellia_setup_tail(u32 *subkey, u32 *subL, u32 *subR, int max)
573 /* apply the inverse of the last half of P-function */ 561 /* apply the inverse of the last half of P-function */
574 i = 2; 562 i = 2;
575 do { 563 do {
576 dw = SUBKEY_L(i + 0) ^ SUBKEY_R(i + 0); dw = ROL8(dw);/* round 1 */ 564 dw = SUBKEY_L(i + 0) ^ SUBKEY_R(i + 0); dw = rol32(dw, 8);/* round 1 */
577 SUBKEY_R(i + 0) = SUBKEY_L(i + 0) ^ dw; SUBKEY_L(i + 0) = dw; 565 SUBKEY_R(i + 0) = SUBKEY_L(i + 0) ^ dw; SUBKEY_L(i + 0) = dw;
578 dw = SUBKEY_L(i + 1) ^ SUBKEY_R(i + 1); dw = ROL8(dw);/* round 2 */ 566 dw = SUBKEY_L(i + 1) ^ SUBKEY_R(i + 1); dw = rol32(dw, 8);/* round 2 */
579 SUBKEY_R(i + 1) = SUBKEY_L(i + 1) ^ dw; SUBKEY_L(i + 1) = dw; 567 SUBKEY_R(i + 1) = SUBKEY_L(i + 1) ^ dw; SUBKEY_L(i + 1) = dw;
580 dw = SUBKEY_L(i + 2) ^ SUBKEY_R(i + 2); dw = ROL8(dw);/* round 3 */ 568 dw = SUBKEY_L(i + 2) ^ SUBKEY_R(i + 2); dw = rol32(dw, 8);/* round 3 */
581 SUBKEY_R(i + 2) = SUBKEY_L(i + 2) ^ dw; SUBKEY_L(i + 2) = dw; 569 SUBKEY_R(i + 2) = SUBKEY_L(i + 2) ^ dw; SUBKEY_L(i + 2) = dw;
582 dw = SUBKEY_L(i + 3) ^ SUBKEY_R(i + 3); dw = ROL8(dw);/* round 4 */ 570 dw = SUBKEY_L(i + 3) ^ SUBKEY_R(i + 3); dw = rol32(dw, 8);/* round 4 */
583 SUBKEY_R(i + 3) = SUBKEY_L(i + 3) ^ dw; SUBKEY_L(i + 3) = dw; 571 SUBKEY_R(i + 3) = SUBKEY_L(i + 3) ^ dw; SUBKEY_L(i + 3) = dw;
584 dw = SUBKEY_L(i + 4) ^ SUBKEY_R(i + 4); dw = ROL8(dw);/* round 5 */ 572 dw = SUBKEY_L(i + 4) ^ SUBKEY_R(i + 4); dw = rol32(dw, 8);/* round 5 */
585 SUBKEY_R(i + 4) = SUBKEY_L(i + 4) ^ dw; SUBKEY_L(i + 4) = dw; 573 SUBKEY_R(i + 4) = SUBKEY_L(i + 4) ^ dw; SUBKEY_L(i + 4) = dw;
586 dw = SUBKEY_L(i + 5) ^ SUBKEY_R(i + 5); dw = ROL8(dw);/* round 6 */ 574 dw = SUBKEY_L(i + 5) ^ SUBKEY_R(i + 5); dw = rol32(dw, 8);/* round 6 */
587 SUBKEY_R(i + 5) = SUBKEY_L(i + 5) ^ dw; SUBKEY_L(i + 5) = dw; 575 SUBKEY_R(i + 5) = SUBKEY_L(i + 5) ^ dw; SUBKEY_L(i + 5) = dw;
588 i += 8; 576 i += 8;
589 } while (i < max); 577 } while (i < max);
@@ -599,10 +587,10 @@ static void camellia_setup128(const unsigned char *key, u32 *subkey)
599 /** 587 /**
600 * k == kll || klr || krl || krr (|| is concatenation) 588 * k == kll || klr || krl || krr (|| is concatenation)
601 */ 589 */
602 GETU32(kll, key ); 590 kll = get_unaligned_be32(key);
603 GETU32(klr, key + 4); 591 klr = get_unaligned_be32(key + 4);
604 GETU32(krl, key + 8); 592 krl = get_unaligned_be32(key + 8);
605 GETU32(krr, key + 12); 593 krr = get_unaligned_be32(key + 12);
606 594
607 /* generate KL dependent subkeys */ 595 /* generate KL dependent subkeys */
608 /* kw1 */ 596 /* kw1 */
@@ -707,14 +695,14 @@ static void camellia_setup256(const unsigned char *key, u32 *subkey)
707 * key = (kll || klr || krl || krr || krll || krlr || krrl || krrr) 695 * key = (kll || klr || krl || krr || krll || krlr || krrl || krrr)
708 * (|| is concatenation) 696 * (|| is concatenation)
709 */ 697 */
710 GETU32(kll, key ); 698 kll = get_unaligned_be32(key);
711 GETU32(klr, key + 4); 699 klr = get_unaligned_be32(key + 4);
712 GETU32(krl, key + 8); 700 krl = get_unaligned_be32(key + 8);
713 GETU32(krr, key + 12); 701 krr = get_unaligned_be32(key + 12);
714 GETU32(krll, key + 16); 702 krll = get_unaligned_be32(key + 16);
715 GETU32(krlr, key + 20); 703 krlr = get_unaligned_be32(key + 20);
716 GETU32(krrl, key + 24); 704 krrl = get_unaligned_be32(key + 24);
717 GETU32(krrr, key + 28); 705 krrr = get_unaligned_be32(key + 28);
718 706
719 /* generate KL dependent subkeys */ 707 /* generate KL dependent subkeys */
720 /* kw1 */ 708 /* kw1 */
@@ -870,13 +858,13 @@ static void camellia_setup192(const unsigned char *key, u32 *subkey)
870 t0 &= ll; \ 858 t0 &= ll; \
871 t2 |= rr; \ 859 t2 |= rr; \
872 rl ^= t2; \ 860 rl ^= t2; \
873 lr ^= ROL1(t0); \ 861 lr ^= rol32(t0, 1); \
874 t3 = krl; \ 862 t3 = krl; \
875 t1 = klr; \ 863 t1 = klr; \
876 t3 &= rl; \ 864 t3 &= rl; \
877 t1 |= lr; \ 865 t1 |= lr; \
878 ll ^= t1; \ 866 ll ^= t1; \
879 rr ^= ROL1(t3); \ 867 rr ^= rol32(t3, 1); \
880 } while(0) 868 } while(0)
881 869
882#define CAMELLIA_ROUNDSM(xl, xr, kl, kr, yl, yr, il, ir) \ 870#define CAMELLIA_ROUNDSM(xl, xr, kl, kr, yl, yr, il, ir) \
@@ -892,7 +880,7 @@ static void camellia_setup192(const unsigned char *key, u32 *subkey)
892 il ^= kl; \ 880 il ^= kl; \
893 ir ^= il ^ kr; \ 881 ir ^= il ^ kr; \
894 yl ^= ir; \ 882 yl ^= ir; \
895 yr ^= ROR8(il) ^ ir; \ 883 yr ^= ror32(il, 8) ^ ir; \
896 } while(0) 884 } while(0)
897 885
898/* max = 24: 128bit encrypt, max = 32: 256bit encrypt */ 886/* max = 24: 128bit encrypt, max = 32: 256bit encrypt */
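The camellia hunks replace the driver's private rotation and load macros with the generic helpers from <linux/bitops.h> and <asm/unaligned.h>. The small user-space check below (with local stand-ins for rol32/ror32 and a big-endian load, since it runs outside the kernel) confirms the replacements are bit-for-bit equivalent to the removed ROL1/ROL8/ROR8/GETU32 macros.

#include <assert.h>
#include <stdint.h>

static uint32_t rol32(uint32_t x, unsigned int s) { return (x << s) | (x >> (32 - s)); }
static uint32_t ror32(uint32_t x, unsigned int s) { return (x >> s) | (x << (32 - s)); }

/* stand-in for get_unaligned_be32() */
static uint32_t load_be32(const uint8_t *p)
{
	return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
	       ((uint32_t)p[2] << 8)  |  (uint32_t)p[3];
}

int main(void)
{
	const uint8_t key[4] = { 0x01, 0x23, 0x45, 0x67 };
	uint32_t x = 0x80000001u;

	assert(rol32(x, 1) == ((x << 1) + (x >> 31)));	/* old ROL1   */
	assert(rol32(x, 8) == ((x << 8) + (x >> 24)));	/* old ROL8   */
	assert(ror32(x, 8) == ((x >> 8) + (x << 24)));	/* old ROR8   */
	assert(load_be32(key) == 0x01234567u);		/* old GETU32 */
	return 0;
}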
diff --git a/crypto/crc32c.c b/crypto/crc32c.c
index a882d9e4e63e..973bc2cfab2e 100644
--- a/crypto/crc32c.c
+++ b/crypto/crc32c.c
@@ -3,8 +3,29 @@
3 * 3 *
4 * CRC32C chksum 4 * CRC32C chksum
5 * 5 *
6 * This module file is a wrapper to invoke the lib/crc32c routines. 6 *@Article{castagnoli-crc,
7 * author = { Guy Castagnoli and Stefan Braeuer and Martin Herrman},
8 * title = {{Optimization of Cyclic Redundancy-Check Codes with 24
9 * and 32 Parity Bits}},
10 * journal = IEEE Transactions on Communication,
11 * year = {1993},
12 * volume = {41},
13 * number = {6},
14 * pages = {},
15 * month = {June},
16 *}
 17 * Used by the iSCSI driver, possibly others, and derived from
 18 * the iscsi-crc.c module of the linux-iscsi driver at
19 * http://linux-iscsi.sourceforge.net.
7 * 20 *
21 * Following the example of lib/crc32, this function is intended to be
22 * flexible and useful for all users. Modules that currently have their
23 * own crc32c, but hopefully may be able to use this one are:
24 * net/sctp (please add all your doco to here if you change to
25 * use this one!)
26 * <endoflist>
27 *
28 * Copyright (c) 2004 Cisco Systems, Inc.
8 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au> 29 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
9 * 30 *
10 * This program is free software; you can redistribute it and/or modify it 31 * This program is free software; you can redistribute it and/or modify it
@@ -18,208 +39,217 @@
18#include <linux/init.h> 39#include <linux/init.h>
19#include <linux/module.h> 40#include <linux/module.h>
20#include <linux/string.h> 41#include <linux/string.h>
21#include <linux/crc32c.h>
22#include <linux/kernel.h> 42#include <linux/kernel.h>
23 43
24#define CHKSUM_BLOCK_SIZE 1 44#define CHKSUM_BLOCK_SIZE 1
25#define CHKSUM_DIGEST_SIZE 4 45#define CHKSUM_DIGEST_SIZE 4
26 46
27struct chksum_ctx { 47struct chksum_ctx {
28 u32 crc;
29 u32 key; 48 u32 key;
30}; 49};
31 50
51struct chksum_desc_ctx {
52 u32 crc;
53};
54
32/* 55/*
 33 * Steps through buffer one byte at a time, calculates reflected 56 * This is the CRC-32C table
34 * crc using table. 57 * Generated with:
58 * width = 32 bits
59 * poly = 0x1EDC6F41
60 * reflect input bytes = true
61 * reflect output bytes = true
35 */ 62 */
36 63
37static void chksum_init(struct crypto_tfm *tfm) 64static const u32 crc32c_table[256] = {
38{ 65 0x00000000L, 0xF26B8303L, 0xE13B70F7L, 0x1350F3F4L,
39 struct chksum_ctx *mctx = crypto_tfm_ctx(tfm); 66 0xC79A971FL, 0x35F1141CL, 0x26A1E7E8L, 0xD4CA64EBL,
40 67 0x8AD958CFL, 0x78B2DBCCL, 0x6BE22838L, 0x9989AB3BL,
41 mctx->crc = mctx->key; 68 0x4D43CFD0L, 0xBF284CD3L, 0xAC78BF27L, 0x5E133C24L,
42} 69 0x105EC76FL, 0xE235446CL, 0xF165B798L, 0x030E349BL,
70 0xD7C45070L, 0x25AFD373L, 0x36FF2087L, 0xC494A384L,
71 0x9A879FA0L, 0x68EC1CA3L, 0x7BBCEF57L, 0x89D76C54L,
72 0x5D1D08BFL, 0xAF768BBCL, 0xBC267848L, 0x4E4DFB4BL,
73 0x20BD8EDEL, 0xD2D60DDDL, 0xC186FE29L, 0x33ED7D2AL,
74 0xE72719C1L, 0x154C9AC2L, 0x061C6936L, 0xF477EA35L,
75 0xAA64D611L, 0x580F5512L, 0x4B5FA6E6L, 0xB93425E5L,
76 0x6DFE410EL, 0x9F95C20DL, 0x8CC531F9L, 0x7EAEB2FAL,
77 0x30E349B1L, 0xC288CAB2L, 0xD1D83946L, 0x23B3BA45L,
78 0xF779DEAEL, 0x05125DADL, 0x1642AE59L, 0xE4292D5AL,
79 0xBA3A117EL, 0x4851927DL, 0x5B016189L, 0xA96AE28AL,
80 0x7DA08661L, 0x8FCB0562L, 0x9C9BF696L, 0x6EF07595L,
81 0x417B1DBCL, 0xB3109EBFL, 0xA0406D4BL, 0x522BEE48L,
82 0x86E18AA3L, 0x748A09A0L, 0x67DAFA54L, 0x95B17957L,
83 0xCBA24573L, 0x39C9C670L, 0x2A993584L, 0xD8F2B687L,
84 0x0C38D26CL, 0xFE53516FL, 0xED03A29BL, 0x1F682198L,
85 0x5125DAD3L, 0xA34E59D0L, 0xB01EAA24L, 0x42752927L,
86 0x96BF4DCCL, 0x64D4CECFL, 0x77843D3BL, 0x85EFBE38L,
87 0xDBFC821CL, 0x2997011FL, 0x3AC7F2EBL, 0xC8AC71E8L,
88 0x1C661503L, 0xEE0D9600L, 0xFD5D65F4L, 0x0F36E6F7L,
89 0x61C69362L, 0x93AD1061L, 0x80FDE395L, 0x72966096L,
90 0xA65C047DL, 0x5437877EL, 0x4767748AL, 0xB50CF789L,
91 0xEB1FCBADL, 0x197448AEL, 0x0A24BB5AL, 0xF84F3859L,
92 0x2C855CB2L, 0xDEEEDFB1L, 0xCDBE2C45L, 0x3FD5AF46L,
93 0x7198540DL, 0x83F3D70EL, 0x90A324FAL, 0x62C8A7F9L,
94 0xB602C312L, 0x44694011L, 0x5739B3E5L, 0xA55230E6L,
95 0xFB410CC2L, 0x092A8FC1L, 0x1A7A7C35L, 0xE811FF36L,
96 0x3CDB9BDDL, 0xCEB018DEL, 0xDDE0EB2AL, 0x2F8B6829L,
97 0x82F63B78L, 0x709DB87BL, 0x63CD4B8FL, 0x91A6C88CL,
98 0x456CAC67L, 0xB7072F64L, 0xA457DC90L, 0x563C5F93L,
99 0x082F63B7L, 0xFA44E0B4L, 0xE9141340L, 0x1B7F9043L,
100 0xCFB5F4A8L, 0x3DDE77ABL, 0x2E8E845FL, 0xDCE5075CL,
101 0x92A8FC17L, 0x60C37F14L, 0x73938CE0L, 0x81F80FE3L,
102 0x55326B08L, 0xA759E80BL, 0xB4091BFFL, 0x466298FCL,
103 0x1871A4D8L, 0xEA1A27DBL, 0xF94AD42FL, 0x0B21572CL,
104 0xDFEB33C7L, 0x2D80B0C4L, 0x3ED04330L, 0xCCBBC033L,
105 0xA24BB5A6L, 0x502036A5L, 0x4370C551L, 0xB11B4652L,
106 0x65D122B9L, 0x97BAA1BAL, 0x84EA524EL, 0x7681D14DL,
107 0x2892ED69L, 0xDAF96E6AL, 0xC9A99D9EL, 0x3BC21E9DL,
108 0xEF087A76L, 0x1D63F975L, 0x0E330A81L, 0xFC588982L,
109 0xB21572C9L, 0x407EF1CAL, 0x532E023EL, 0xA145813DL,
110 0x758FE5D6L, 0x87E466D5L, 0x94B49521L, 0x66DF1622L,
111 0x38CC2A06L, 0xCAA7A905L, 0xD9F75AF1L, 0x2B9CD9F2L,
112 0xFF56BD19L, 0x0D3D3E1AL, 0x1E6DCDEEL, 0xEC064EEDL,
113 0xC38D26C4L, 0x31E6A5C7L, 0x22B65633L, 0xD0DDD530L,
114 0x0417B1DBL, 0xF67C32D8L, 0xE52CC12CL, 0x1747422FL,
115 0x49547E0BL, 0xBB3FFD08L, 0xA86F0EFCL, 0x5A048DFFL,
116 0x8ECEE914L, 0x7CA56A17L, 0x6FF599E3L, 0x9D9E1AE0L,
117 0xD3D3E1ABL, 0x21B862A8L, 0x32E8915CL, 0xC083125FL,
118 0x144976B4L, 0xE622F5B7L, 0xF5720643L, 0x07198540L,
119 0x590AB964L, 0xAB613A67L, 0xB831C993L, 0x4A5A4A90L,
120 0x9E902E7BL, 0x6CFBAD78L, 0x7FAB5E8CL, 0x8DC0DD8FL,
121 0xE330A81AL, 0x115B2B19L, 0x020BD8EDL, 0xF0605BEEL,
122 0x24AA3F05L, 0xD6C1BC06L, 0xC5914FF2L, 0x37FACCF1L,
123 0x69E9F0D5L, 0x9B8273D6L, 0x88D28022L, 0x7AB90321L,
124 0xAE7367CAL, 0x5C18E4C9L, 0x4F48173DL, 0xBD23943EL,
125 0xF36E6F75L, 0x0105EC76L, 0x12551F82L, 0xE03E9C81L,
126 0x34F4F86AL, 0xC69F7B69L, 0xD5CF889DL, 0x27A40B9EL,
127 0x79B737BAL, 0x8BDCB4B9L, 0x988C474DL, 0x6AE7C44EL,
128 0xBE2DA0A5L, 0x4C4623A6L, 0x5F16D052L, 0xAD7D5351L
129};
43 130
44/* 131/*
 45 * Setting the seed allows arbitrary accumulators and flexible XOR policy 132 * Steps through buffer one byte at a time, calculates reflected
46 * If your algorithm starts with ~0, then XOR with ~0 before you set 133 * crc using table.
47 * the seed.
48 */ 134 */
49static int chksum_setkey(struct crypto_tfm *tfm, const u8 *key,
50 unsigned int keylen)
51{
52 struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
53 135
54 if (keylen != sizeof(mctx->crc)) { 136static u32 crc32c(u32 crc, const u8 *data, unsigned int length)
55 tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
56 return -EINVAL;
57 }
58 mctx->key = le32_to_cpu(*(__le32 *)key);
59 return 0;
60}
61
62static void chksum_update(struct crypto_tfm *tfm, const u8 *data,
63 unsigned int length)
64{ 137{
65 struct chksum_ctx *mctx = crypto_tfm_ctx(tfm); 138 while (length--)
139 crc = crc32c_table[(crc ^ *data++) & 0xFFL] ^ (crc >> 8);
66 140
67 mctx->crc = crc32c(mctx->crc, data, length); 141 return crc;
68} 142}
69 143
70static void chksum_final(struct crypto_tfm *tfm, u8 *out) 144/*
 71{ 145 * Steps through buffer one byte at a time, calculates reflected
72 struct chksum_ctx *mctx = crypto_tfm_ctx(tfm); 146 * crc using table.
73 147 */
74 *(__le32 *)out = ~cpu_to_le32(mctx->crc);
75}
76 148
77static int crc32c_cra_init_old(struct crypto_tfm *tfm) 149static int chksum_init(struct shash_desc *desc)
78{ 150{
79 struct chksum_ctx *mctx = crypto_tfm_ctx(tfm); 151 struct chksum_ctx *mctx = crypto_shash_ctx(desc->tfm);
152 struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
153
154 ctx->crc = mctx->key;
80 155
81 mctx->key = ~0;
82 return 0; 156 return 0;
83} 157}
84 158
85static struct crypto_alg old_alg = {
86 .cra_name = "crc32c",
87 .cra_flags = CRYPTO_ALG_TYPE_DIGEST,
88 .cra_blocksize = CHKSUM_BLOCK_SIZE,
89 .cra_ctxsize = sizeof(struct chksum_ctx),
90 .cra_module = THIS_MODULE,
91 .cra_list = LIST_HEAD_INIT(old_alg.cra_list),
92 .cra_init = crc32c_cra_init_old,
93 .cra_u = {
94 .digest = {
95 .dia_digestsize= CHKSUM_DIGEST_SIZE,
96 .dia_setkey = chksum_setkey,
97 .dia_init = chksum_init,
98 .dia_update = chksum_update,
99 .dia_final = chksum_final
100 }
101 }
102};
103
104/* 159/*
105 * Setting the seed allows arbitrary accumulators and flexible XOR policy 160 * Setting the seed allows arbitrary accumulators and flexible XOR policy
106 * If your algorithm starts with ~0, then XOR with ~0 before you set 161 * If your algorithm starts with ~0, then XOR with ~0 before you set
107 * the seed. 162 * the seed.
108 */ 163 */
109static int crc32c_setkey(struct crypto_ahash *hash, const u8 *key, 164static int chksum_setkey(struct crypto_shash *tfm, const u8 *key,
110 unsigned int keylen) 165 unsigned int keylen)
111{ 166{
112 u32 *mctx = crypto_ahash_ctx(hash); 167 struct chksum_ctx *mctx = crypto_shash_ctx(tfm);
113 168
114 if (keylen != sizeof(u32)) { 169 if (keylen != sizeof(mctx->key)) {
115 crypto_ahash_set_flags(hash, CRYPTO_TFM_RES_BAD_KEY_LEN); 170 crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
116 return -EINVAL; 171 return -EINVAL;
117 } 172 }
118 *mctx = le32_to_cpup((__le32 *)key); 173 mctx->key = le32_to_cpu(*(__le32 *)key);
119 return 0; 174 return 0;
120} 175}
121 176
122static int crc32c_init(struct ahash_request *req) 177static int chksum_update(struct shash_desc *desc, const u8 *data,
178 unsigned int length)
123{ 179{
124 u32 *mctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req)); 180 struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
125 u32 *crcp = ahash_request_ctx(req);
126 181
127 *crcp = *mctx; 182 ctx->crc = crc32c(ctx->crc, data, length);
128 return 0; 183 return 0;
129} 184}
130 185
131static int crc32c_update(struct ahash_request *req) 186static int chksum_final(struct shash_desc *desc, u8 *out)
132{ 187{
133 struct crypto_hash_walk walk; 188 struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
134 u32 *crcp = ahash_request_ctx(req);
135 u32 crc = *crcp;
136 int nbytes;
137
138 for (nbytes = crypto_hash_walk_first(req, &walk); nbytes;
139 nbytes = crypto_hash_walk_done(&walk, 0))
140 crc = crc32c(crc, walk.data, nbytes);
141 189
142 *crcp = crc; 190 *(__le32 *)out = ~cpu_to_le32p(&ctx->crc);
143 return 0; 191 return 0;
144} 192}
145 193
146static int crc32c_final(struct ahash_request *req) 194static int __chksum_finup(u32 *crcp, const u8 *data, unsigned int len, u8 *out)
147{ 195{
148 u32 *crcp = ahash_request_ctx(req); 196 *(__le32 *)out = ~cpu_to_le32(crc32c(*crcp, data, len));
149
150 *(__le32 *)req->result = ~cpu_to_le32p(crcp);
151 return 0; 197 return 0;
152} 198}
153 199
154static int crc32c_digest(struct ahash_request *req) 200static int chksum_finup(struct shash_desc *desc, const u8 *data,
201 unsigned int len, u8 *out)
155{ 202{
156 struct crypto_hash_walk walk; 203 struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
157 u32 *mctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
158 u32 crc = *mctx;
159 int nbytes;
160 204
161 for (nbytes = crypto_hash_walk_first(req, &walk); nbytes; 205 return __chksum_finup(&ctx->crc, data, len, out);
162 nbytes = crypto_hash_walk_done(&walk, 0))
163 crc = crc32c(crc, walk.data, nbytes);
164
165 *(__le32 *)req->result = ~cpu_to_le32(crc);
166 return 0;
167} 206}
168 207
169static int crc32c_cra_init(struct crypto_tfm *tfm) 208static int chksum_digest(struct shash_desc *desc, const u8 *data,
209 unsigned int length, u8 *out)
170{ 210{
171 u32 *key = crypto_tfm_ctx(tfm); 211 struct chksum_ctx *mctx = crypto_shash_ctx(desc->tfm);
172 212
173 *key = ~0; 213 return __chksum_finup(&mctx->key, data, length, out);
214}
174 215
175 tfm->crt_ahash.reqsize = sizeof(u32); 216static int crc32c_cra_init(struct crypto_tfm *tfm)
217{
218 struct chksum_ctx *mctx = crypto_tfm_ctx(tfm);
176 219
220 mctx->key = ~0;
177 return 0; 221 return 0;
178} 222}
179 223
180static struct crypto_alg alg = { 224static struct shash_alg alg = {
181 .cra_name = "crc32c", 225 .digestsize = CHKSUM_DIGEST_SIZE,
182 .cra_driver_name = "crc32c-generic", 226 .setkey = chksum_setkey,
183 .cra_priority = 100, 227 .init = chksum_init,
184 .cra_flags = CRYPTO_ALG_TYPE_AHASH, 228 .update = chksum_update,
185 .cra_blocksize = CHKSUM_BLOCK_SIZE, 229 .final = chksum_final,
186 .cra_alignmask = 3, 230 .finup = chksum_finup,
187 .cra_ctxsize = sizeof(u32), 231 .digest = chksum_digest,
188 .cra_module = THIS_MODULE, 232 .descsize = sizeof(struct chksum_desc_ctx),
189 .cra_list = LIST_HEAD_INIT(alg.cra_list), 233 .base = {
190 .cra_init = crc32c_cra_init, 234 .cra_name = "crc32c",
191 .cra_type = &crypto_ahash_type, 235 .cra_driver_name = "crc32c-generic",
192 .cra_u = { 236 .cra_priority = 100,
193 .ahash = { 237 .cra_blocksize = CHKSUM_BLOCK_SIZE,
194 .digestsize = CHKSUM_DIGEST_SIZE, 238 .cra_alignmask = 3,
195 .setkey = crc32c_setkey, 239 .cra_ctxsize = sizeof(struct chksum_ctx),
196 .init = crc32c_init, 240 .cra_module = THIS_MODULE,
197 .update = crc32c_update, 241 .cra_init = crc32c_cra_init,
198 .final = crc32c_final,
199 .digest = crc32c_digest,
200 }
201 } 242 }
202}; 243};
203 244
204static int __init crc32c_mod_init(void) 245static int __init crc32c_mod_init(void)
205{ 246{
206 int err; 247 return crypto_register_shash(&alg);
207
208 err = crypto_register_alg(&old_alg);
209 if (err)
210 return err;
211
212 err = crypto_register_alg(&alg);
213 if (err)
214 crypto_unregister_alg(&old_alg);
215
216 return err;
217} 248}
218 249
219static void __exit crc32c_mod_fini(void) 250static void __exit crc32c_mod_fini(void)
220{ 251{
221 crypto_unregister_alg(&alg); 252 crypto_unregister_shash(&alg);
222 crypto_unregister_alg(&old_alg);
223} 253}
224 254
225module_init(crc32c_mod_init); 255module_init(crc32c_mod_init);
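The comment above the new crc32c_table[] gives its generator parameters (width 32, poly 0x1EDC6F41, reflected input and output). The user-space sketch below rebuilds the table from those parameters; with reflection the loop uses the bit-reversed polynomial 0x82F63B78, and the first entries it prints match 0x00000000, 0xF26B8303, 0xE13B70F7, 0x1350F3F4 above. The ~0 seeded in crc32c_cra_init() and the final inversion in chksum_final() are the standard CRC-32C initial value and output XOR referred to in the setkey comment.

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t table[256];
	unsigned int i, j;

	for (i = 0; i < 256; i++) {
		uint32_t crc = i;

		/* reflected table: shift right, XOR the reversed polynomial */
		for (j = 0; j < 8; j++)
			crc = (crc >> 1) ^ ((crc & 1) ? 0x82F63B78u : 0);
		table[i] = crc;
	}

	for (i = 0; i < 4; i++)
		printf("0x%08X\n", table[i]);
	return 0;
}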
diff --git a/crypto/crypto_null.c b/crypto/crypto_null.c
index 1f7d53013a22..cb71c9122bc0 100644
--- a/crypto/crypto_null.c
+++ b/crypto/crypto_null.c
@@ -17,6 +17,7 @@
17 * 17 *
18 */ 18 */
19 19
20#include <crypto/internal/hash.h>
20#include <crypto/internal/skcipher.h> 21#include <crypto/internal/skcipher.h>
21#include <linux/init.h> 22#include <linux/init.h>
22#include <linux/module.h> 23#include <linux/module.h>
@@ -38,15 +39,31 @@ static int null_compress(struct crypto_tfm *tfm, const u8 *src,
38 return 0; 39 return 0;
39} 40}
40 41
41static void null_init(struct crypto_tfm *tfm) 42static int null_init(struct shash_desc *desc)
42{ } 43{
44 return 0;
45}
43 46
44static void null_update(struct crypto_tfm *tfm, const u8 *data, 47static int null_update(struct shash_desc *desc, const u8 *data,
45 unsigned int len) 48 unsigned int len)
46{ } 49{
50 return 0;
51}
47 52
48static void null_final(struct crypto_tfm *tfm, u8 *out) 53static int null_final(struct shash_desc *desc, u8 *out)
49{ } 54{
55 return 0;
56}
57
58static int null_digest(struct shash_desc *desc, const u8 *data,
59 unsigned int len, u8 *out)
60{
61 return 0;
62}
63
64static int null_hash_setkey(struct crypto_shash *tfm, const u8 *key,
65 unsigned int keylen)
66{ return 0; }
50 67
51static int null_setkey(struct crypto_tfm *tfm, const u8 *key, 68static int null_setkey(struct crypto_tfm *tfm, const u8 *key,
52 unsigned int keylen) 69 unsigned int keylen)
@@ -89,19 +106,20 @@ static struct crypto_alg compress_null = {
89 .coa_decompress = null_compress } } 106 .coa_decompress = null_compress } }
90}; 107};
91 108
92static struct crypto_alg digest_null = { 109static struct shash_alg digest_null = {
93 .cra_name = "digest_null", 110 .digestsize = NULL_DIGEST_SIZE,
94 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 111 .setkey = null_hash_setkey,
95 .cra_blocksize = NULL_BLOCK_SIZE, 112 .init = null_init,
96 .cra_ctxsize = 0, 113 .update = null_update,
97 .cra_module = THIS_MODULE, 114 .finup = null_digest,
98 .cra_list = LIST_HEAD_INIT(digest_null.cra_list), 115 .digest = null_digest,
99 .cra_u = { .digest = { 116 .final = null_final,
100 .dia_digestsize = NULL_DIGEST_SIZE, 117 .base = {
101 .dia_setkey = null_setkey, 118 .cra_name = "digest_null",
102 .dia_init = null_init, 119 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
103 .dia_update = null_update, 120 .cra_blocksize = NULL_BLOCK_SIZE,
104 .dia_final = null_final } } 121 .cra_module = THIS_MODULE,
122 }
105}; 123};
106 124
107static struct crypto_alg cipher_null = { 125static struct crypto_alg cipher_null = {
@@ -154,7 +172,7 @@ static int __init crypto_null_mod_init(void)
154 if (ret < 0) 172 if (ret < 0)
155 goto out_unregister_cipher; 173 goto out_unregister_cipher;
156 174
157 ret = crypto_register_alg(&digest_null); 175 ret = crypto_register_shash(&digest_null);
158 if (ret < 0) 176 if (ret < 0)
159 goto out_unregister_skcipher; 177 goto out_unregister_skcipher;
160 178
@@ -166,7 +184,7 @@ out:
166 return ret; 184 return ret;
167 185
168out_unregister_digest: 186out_unregister_digest:
169 crypto_unregister_alg(&digest_null); 187 crypto_unregister_shash(&digest_null);
170out_unregister_skcipher: 188out_unregister_skcipher:
171 crypto_unregister_alg(&skcipher_null); 189 crypto_unregister_alg(&skcipher_null);
172out_unregister_cipher: 190out_unregister_cipher:
@@ -177,7 +195,7 @@ out_unregister_cipher:
177static void __exit crypto_null_mod_fini(void) 195static void __exit crypto_null_mod_fini(void)
178{ 196{
179 crypto_unregister_alg(&compress_null); 197 crypto_unregister_alg(&compress_null);
180 crypto_unregister_alg(&digest_null); 198 crypto_unregister_shash(&digest_null);
181 crypto_unregister_alg(&skcipher_null); 199 crypto_unregister_alg(&skcipher_null);
182 crypto_unregister_alg(&cipher_null); 200 crypto_unregister_alg(&cipher_null);
183} 201}
diff --git a/crypto/des_generic.c b/crypto/des_generic.c
index 5d0e4580f998..5bd3ee345a64 100644
--- a/crypto/des_generic.c
+++ b/crypto/des_generic.c
@@ -868,9 +868,10 @@ static int des3_ede_setkey(struct crypto_tfm *tfm, const u8 *key,
868 u32 *flags = &tfm->crt_flags; 868 u32 *flags = &tfm->crt_flags;
869 869
870 if (unlikely(!((K[0] ^ K[2]) | (K[1] ^ K[3])) || 870 if (unlikely(!((K[0] ^ K[2]) | (K[1] ^ K[3])) ||
871 !((K[2] ^ K[4]) | (K[3] ^ K[5])))) 871 !((K[2] ^ K[4]) | (K[3] ^ K[5]))) &&
872 (*flags & CRYPTO_TFM_REQ_WEAK_KEY))
872 { 873 {
873 *flags |= CRYPTO_TFM_RES_BAD_KEY_SCHED; 874 *flags |= CRYPTO_TFM_RES_WEAK_KEY;
874 return -EINVAL; 875 return -EINVAL;
875 } 876 }
876 877
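The des3_ede change above inverts the old behaviour: a key whose halves collapse 3DES to single DES is now accepted by default and rejected only when the caller sets CRYPTO_TFM_REQ_WEAK_KEY, with CRYPTO_TFM_RES_WEAK_KEY (rather than BAD_KEY_SCHED) reported on rejection. A rough sketch of a caller opting back into strict checking, as a hypothetical helper on an already-allocated "des3_ede" cipher handle:

#include <linux/crypto.h>

static int des3_setkey_strict(struct crypto_cipher *cipher,
			      const u8 *key, unsigned int keylen)
{
	/* Ask setkey to reject keys that degenerate to single DES. */
	crypto_cipher_set_flags(cipher, CRYPTO_TFM_REQ_WEAK_KEY);

	/* On rejection, CRYPTO_TFM_RES_WEAK_KEY is set in the result flags. */
	return crypto_cipher_setkey(cipher, key, keylen);
}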
diff --git a/crypto/fcrypt.c b/crypto/fcrypt.c
index 1302f4cae337..b82d61f4e26c 100644
--- a/crypto/fcrypt.c
+++ b/crypto/fcrypt.c
@@ -73,7 +73,7 @@ do { \
73 * /afs/transarc.com/public/afsps/afs.rel31b.export-src/rxkad/sboxes.h 73 * /afs/transarc.com/public/afsps/afs.rel31b.export-src/rxkad/sboxes.h
74 */ 74 */
75#undef Z 75#undef Z
76#define Z(x) __constant_cpu_to_be32(x << 3) 76#define Z(x) cpu_to_be32(x << 3)
77static const __be32 sbox0[256] = { 77static const __be32 sbox0[256] = {
78 Z(0xea), Z(0x7f), Z(0xb2), Z(0x64), Z(0x9d), Z(0xb0), Z(0xd9), Z(0x11), 78 Z(0xea), Z(0x7f), Z(0xb2), Z(0x64), Z(0x9d), Z(0xb0), Z(0xd9), Z(0x11),
79 Z(0xcd), Z(0x86), Z(0x86), Z(0x91), Z(0x0a), Z(0xb2), Z(0x93), Z(0x06), 79 Z(0xcd), Z(0x86), Z(0x86), Z(0x91), Z(0x0a), Z(0xb2), Z(0x93), Z(0x06),
@@ -110,7 +110,7 @@ static const __be32 sbox0[256] = {
110}; 110};
111 111
112#undef Z 112#undef Z
113#define Z(x) __constant_cpu_to_be32((x << 27) | (x >> 5)) 113#define Z(x) cpu_to_be32((x << 27) | (x >> 5))
114static const __be32 sbox1[256] = { 114static const __be32 sbox1[256] = {
115 Z(0x77), Z(0x14), Z(0xa6), Z(0xfe), Z(0xb2), Z(0x5e), Z(0x8c), Z(0x3e), 115 Z(0x77), Z(0x14), Z(0xa6), Z(0xfe), Z(0xb2), Z(0x5e), Z(0x8c), Z(0x3e),
116 Z(0x67), Z(0x6c), Z(0xa1), Z(0x0d), Z(0xc2), Z(0xa2), Z(0xc1), Z(0x85), 116 Z(0x67), Z(0x6c), Z(0xa1), Z(0x0d), Z(0xc2), Z(0xa2), Z(0xc1), Z(0x85),
@@ -147,7 +147,7 @@ static const __be32 sbox1[256] = {
147}; 147};
148 148
149#undef Z 149#undef Z
150#define Z(x) __constant_cpu_to_be32(x << 11) 150#define Z(x) cpu_to_be32(x << 11)
151static const __be32 sbox2[256] = { 151static const __be32 sbox2[256] = {
152 Z(0xf0), Z(0x37), Z(0x24), Z(0x53), Z(0x2a), Z(0x03), Z(0x83), Z(0x86), 152 Z(0xf0), Z(0x37), Z(0x24), Z(0x53), Z(0x2a), Z(0x03), Z(0x83), Z(0x86),
153 Z(0xd1), Z(0xec), Z(0x50), Z(0xf0), Z(0x42), Z(0x78), Z(0x2f), Z(0x6d), 153 Z(0xd1), Z(0xec), Z(0x50), Z(0xf0), Z(0x42), Z(0x78), Z(0x2f), Z(0x6d),
@@ -184,7 +184,7 @@ static const __be32 sbox2[256] = {
184}; 184};
185 185
186#undef Z 186#undef Z
187#define Z(x) __constant_cpu_to_be32(x << 19) 187#define Z(x) cpu_to_be32(x << 19)
188static const __be32 sbox3[256] = { 188static const __be32 sbox3[256] = {
189 Z(0xa9), Z(0x2a), Z(0x48), Z(0x51), Z(0x84), Z(0x7e), Z(0x49), Z(0xe2), 189 Z(0xa9), Z(0x2a), Z(0x48), Z(0x51), Z(0x84), Z(0x7e), Z(0x49), Z(0xe2),
190 Z(0xb5), Z(0xb7), Z(0x42), Z(0x33), Z(0x7d), Z(0x5d), Z(0xa6), Z(0x12), 190 Z(0xb5), Z(0xb7), Z(0x42), Z(0x33), Z(0x7d), Z(0x5d), Z(0xa6), Z(0x12),
diff --git a/crypto/hmac.c b/crypto/hmac.c
index 7ff2d6a8c7d0..0ad39c374963 100644
--- a/crypto/hmac.c
+++ b/crypto/hmac.c
@@ -16,7 +16,7 @@
16 * 16 *
17 */ 17 */
18 18
19#include <crypto/algapi.h> 19#include <crypto/internal/hash.h>
20#include <crypto/scatterwalk.h> 20#include <crypto/scatterwalk.h>
21#include <linux/err.h> 21#include <linux/err.h>
22#include <linux/init.h> 22#include <linux/init.h>
@@ -238,9 +238,11 @@ static struct crypto_instance *hmac_alloc(struct rtattr **tb)
238 return ERR_CAST(alg); 238 return ERR_CAST(alg);
239 239
240 inst = ERR_PTR(-EINVAL); 240 inst = ERR_PTR(-EINVAL);
241 ds = (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == 241 ds = alg->cra_type == &crypto_hash_type ?
242 CRYPTO_ALG_TYPE_HASH ? alg->cra_hash.digestsize : 242 alg->cra_hash.digestsize :
243 alg->cra_digest.dia_digestsize; 243 alg->cra_type ?
244 __crypto_shash_alg(alg)->digestsize :
245 alg->cra_digest.dia_digestsize;
244 if (ds > alg->cra_blocksize) 246 if (ds > alg->cra_blocksize)
245 goto out_put_alg; 247 goto out_put_alg;
246 248
diff --git a/crypto/internal.h b/crypto/internal.h
index 8ef72d76092e..3c19a27a7563 100644
--- a/crypto/internal.h
+++ b/crypto/internal.h
@@ -109,6 +109,8 @@ void crypto_alg_tested(const char *name, int err);
109void crypto_shoot_alg(struct crypto_alg *alg); 109void crypto_shoot_alg(struct crypto_alg *alg);
110struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type, 110struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
111 u32 mask); 111 u32 mask);
112struct crypto_tfm *crypto_create_tfm(struct crypto_alg *alg,
113 const struct crypto_type *frontend);
112 114
113int crypto_register_instance(struct crypto_template *tmpl, 115int crypto_register_instance(struct crypto_template *tmpl,
114 struct crypto_instance *inst); 116 struct crypto_instance *inst);
diff --git a/crypto/md4.c b/crypto/md4.c
index 3c19aa0750fd..7fca1f59a4f5 100644
--- a/crypto/md4.c
+++ b/crypto/md4.c
@@ -20,8 +20,8 @@
20 * (at your option) any later version. 20 * (at your option) any later version.
21 * 21 *
22 */ 22 */
23#include <crypto/internal/hash.h>
23#include <linux/init.h> 24#include <linux/init.h>
24#include <linux/crypto.h>
25#include <linux/kernel.h> 25#include <linux/kernel.h>
26#include <linux/string.h> 26#include <linux/string.h>
27#include <linux/types.h> 27#include <linux/types.h>
@@ -58,7 +58,7 @@ static inline u32 H(u32 x, u32 y, u32 z)
58{ 58{
59 return x ^ y ^ z; 59 return x ^ y ^ z;
60} 60}
61 61
62#define ROUND1(a,b,c,d,k,s) (a = lshift(a + F(b,c,d) + k, s)) 62#define ROUND1(a,b,c,d,k,s) (a = lshift(a + F(b,c,d) + k, s))
63#define ROUND2(a,b,c,d,k,s) (a = lshift(a + G(b,c,d) + k + (u32)0x5A827999,s)) 63#define ROUND2(a,b,c,d,k,s) (a = lshift(a + G(b,c,d) + k + (u32)0x5A827999,s))
64#define ROUND3(a,b,c,d,k,s) (a = lshift(a + H(b,c,d) + k + (u32)0x6ED9EBA1,s)) 64#define ROUND3(a,b,c,d,k,s) (a = lshift(a + H(b,c,d) + k + (u32)0x6ED9EBA1,s))
@@ -148,24 +148,26 @@ static void md4_transform(u32 *hash, u32 const *in)
148 148
149static inline void md4_transform_helper(struct md4_ctx *ctx) 149static inline void md4_transform_helper(struct md4_ctx *ctx)
150{ 150{
151 le32_to_cpu_array(ctx->block, sizeof(ctx->block) / sizeof(u32)); 151 le32_to_cpu_array(ctx->block, ARRAY_SIZE(ctx->block));
152 md4_transform(ctx->hash, ctx->block); 152 md4_transform(ctx->hash, ctx->block);
153} 153}
154 154
155static void md4_init(struct crypto_tfm *tfm) 155static int md4_init(struct shash_desc *desc)
156{ 156{
157 struct md4_ctx *mctx = crypto_tfm_ctx(tfm); 157 struct md4_ctx *mctx = shash_desc_ctx(desc);
158 158
159 mctx->hash[0] = 0x67452301; 159 mctx->hash[0] = 0x67452301;
160 mctx->hash[1] = 0xefcdab89; 160 mctx->hash[1] = 0xefcdab89;
161 mctx->hash[2] = 0x98badcfe; 161 mctx->hash[2] = 0x98badcfe;
162 mctx->hash[3] = 0x10325476; 162 mctx->hash[3] = 0x10325476;
163 mctx->byte_count = 0; 163 mctx->byte_count = 0;
164
165 return 0;
164} 166}
165 167
166static void md4_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len) 168static int md4_update(struct shash_desc *desc, const u8 *data, unsigned int len)
167{ 169{
168 struct md4_ctx *mctx = crypto_tfm_ctx(tfm); 170 struct md4_ctx *mctx = shash_desc_ctx(desc);
169 const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f); 171 const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f);
170 172
171 mctx->byte_count += len; 173 mctx->byte_count += len;
@@ -173,7 +175,7 @@ static void md4_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
173 if (avail > len) { 175 if (avail > len) {
174 memcpy((char *)mctx->block + (sizeof(mctx->block) - avail), 176 memcpy((char *)mctx->block + (sizeof(mctx->block) - avail),
175 data, len); 177 data, len);
176 return; 178 return 0;
177 } 179 }
178 180
179 memcpy((char *)mctx->block + (sizeof(mctx->block) - avail), 181 memcpy((char *)mctx->block + (sizeof(mctx->block) - avail),
@@ -191,11 +193,13 @@ static void md4_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
191 } 193 }
192 194
193 memcpy(mctx->block, data, len); 195 memcpy(mctx->block, data, len);
196
197 return 0;
194} 198}
195 199
196static void md4_final(struct crypto_tfm *tfm, u8 *out) 200static int md4_final(struct shash_desc *desc, u8 *out)
197{ 201{
198 struct md4_ctx *mctx = crypto_tfm_ctx(tfm); 202 struct md4_ctx *mctx = shash_desc_ctx(desc);
199 const unsigned int offset = mctx->byte_count & 0x3f; 203 const unsigned int offset = mctx->byte_count & 0x3f;
200 char *p = (char *)mctx->block + offset; 204 char *p = (char *)mctx->block + offset;
201 int padding = 56 - (offset + 1); 205 int padding = 56 - (offset + 1);
@@ -214,33 +218,35 @@ static void md4_final(struct crypto_tfm *tfm, u8 *out)
214 le32_to_cpu_array(mctx->block, (sizeof(mctx->block) - 218 le32_to_cpu_array(mctx->block, (sizeof(mctx->block) -
215 sizeof(u64)) / sizeof(u32)); 219 sizeof(u64)) / sizeof(u32));
216 md4_transform(mctx->hash, mctx->block); 220 md4_transform(mctx->hash, mctx->block);
217 cpu_to_le32_array(mctx->hash, sizeof(mctx->hash) / sizeof(u32)); 221 cpu_to_le32_array(mctx->hash, ARRAY_SIZE(mctx->hash));
218 memcpy(out, mctx->hash, sizeof(mctx->hash)); 222 memcpy(out, mctx->hash, sizeof(mctx->hash));
219 memset(mctx, 0, sizeof(*mctx)); 223 memset(mctx, 0, sizeof(*mctx));
224
225 return 0;
220} 226}
221 227
222static struct crypto_alg alg = { 228static struct shash_alg alg = {
223 .cra_name = "md4", 229 .digestsize = MD4_DIGEST_SIZE,
224 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 230 .init = md4_init,
225 .cra_blocksize = MD4_HMAC_BLOCK_SIZE, 231 .update = md4_update,
226 .cra_ctxsize = sizeof(struct md4_ctx), 232 .final = md4_final,
227 .cra_module = THIS_MODULE, 233 .descsize = sizeof(struct md4_ctx),
228 .cra_list = LIST_HEAD_INIT(alg.cra_list), 234 .base = {
229 .cra_u = { .digest = { 235 .cra_name = "md4",
230 .dia_digestsize = MD4_DIGEST_SIZE, 236 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
231 .dia_init = md4_init, 237 .cra_blocksize = MD4_HMAC_BLOCK_SIZE,
232 .dia_update = md4_update, 238 .cra_module = THIS_MODULE,
233 .dia_final = md4_final } } 239 }
234}; 240};
235 241
236static int __init md4_mod_init(void) 242static int __init md4_mod_init(void)
237{ 243{
238 return crypto_register_alg(&alg); 244 return crypto_register_shash(&alg);
239} 245}
240 246
241static void __exit md4_mod_fini(void) 247static void __exit md4_mod_fini(void)
242{ 248{
243 crypto_unregister_alg(&alg); 249 crypto_unregister_shash(&alg);
244} 250}
245 251
246module_init(md4_mod_init); 252module_init(md4_mod_init);
diff --git a/crypto/md5.c b/crypto/md5.c
index 39268f3d2f1d..83eb52961750 100644
--- a/crypto/md5.c
+++ b/crypto/md5.c
@@ -15,10 +15,10 @@
15 * any later version. 15 * any later version.
16 * 16 *
17 */ 17 */
18#include <crypto/internal/hash.h>
18#include <linux/init.h> 19#include <linux/init.h>
19#include <linux/module.h> 20#include <linux/module.h>
20#include <linux/string.h> 21#include <linux/string.h>
21#include <linux/crypto.h>
22#include <linux/types.h> 22#include <linux/types.h>
23#include <asm/byteorder.h> 23#include <asm/byteorder.h>
24 24
@@ -147,20 +147,22 @@ static inline void md5_transform_helper(struct md5_ctx *ctx)
147 md5_transform(ctx->hash, ctx->block); 147 md5_transform(ctx->hash, ctx->block);
148} 148}
149 149
150static void md5_init(struct crypto_tfm *tfm) 150static int md5_init(struct shash_desc *desc)
151{ 151{
152 struct md5_ctx *mctx = crypto_tfm_ctx(tfm); 152 struct md5_ctx *mctx = shash_desc_ctx(desc);
153 153
154 mctx->hash[0] = 0x67452301; 154 mctx->hash[0] = 0x67452301;
155 mctx->hash[1] = 0xefcdab89; 155 mctx->hash[1] = 0xefcdab89;
156 mctx->hash[2] = 0x98badcfe; 156 mctx->hash[2] = 0x98badcfe;
157 mctx->hash[3] = 0x10325476; 157 mctx->hash[3] = 0x10325476;
158 mctx->byte_count = 0; 158 mctx->byte_count = 0;
159
160 return 0;
159} 161}
160 162
161static void md5_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len) 163static int md5_update(struct shash_desc *desc, const u8 *data, unsigned int len)
162{ 164{
163 struct md5_ctx *mctx = crypto_tfm_ctx(tfm); 165 struct md5_ctx *mctx = shash_desc_ctx(desc);
164 const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f); 166 const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x3f);
165 167
166 mctx->byte_count += len; 168 mctx->byte_count += len;
@@ -168,7 +170,7 @@ static void md5_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
168 if (avail > len) { 170 if (avail > len) {
169 memcpy((char *)mctx->block + (sizeof(mctx->block) - avail), 171 memcpy((char *)mctx->block + (sizeof(mctx->block) - avail),
170 data, len); 172 data, len);
171 return; 173 return 0;
172 } 174 }
173 175
174 memcpy((char *)mctx->block + (sizeof(mctx->block) - avail), 176 memcpy((char *)mctx->block + (sizeof(mctx->block) - avail),
@@ -186,11 +188,13 @@ static void md5_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
186 } 188 }
187 189
188 memcpy(mctx->block, data, len); 190 memcpy(mctx->block, data, len);
191
192 return 0;
189} 193}
190 194
191static void md5_final(struct crypto_tfm *tfm, u8 *out) 195static int md5_final(struct shash_desc *desc, u8 *out)
192{ 196{
193 struct md5_ctx *mctx = crypto_tfm_ctx(tfm); 197 struct md5_ctx *mctx = shash_desc_ctx(desc);
194 const unsigned int offset = mctx->byte_count & 0x3f; 198 const unsigned int offset = mctx->byte_count & 0x3f;
195 char *p = (char *)mctx->block + offset; 199 char *p = (char *)mctx->block + offset;
196 int padding = 56 - (offset + 1); 200 int padding = 56 - (offset + 1);
@@ -212,30 +216,32 @@ static void md5_final(struct crypto_tfm *tfm, u8 *out)
212 cpu_to_le32_array(mctx->hash, sizeof(mctx->hash) / sizeof(u32)); 216 cpu_to_le32_array(mctx->hash, sizeof(mctx->hash) / sizeof(u32));
213 memcpy(out, mctx->hash, sizeof(mctx->hash)); 217 memcpy(out, mctx->hash, sizeof(mctx->hash));
214 memset(mctx, 0, sizeof(*mctx)); 218 memset(mctx, 0, sizeof(*mctx));
219
220 return 0;
215} 221}
216 222
217static struct crypto_alg alg = { 223static struct shash_alg alg = {
218 .cra_name = "md5", 224 .digestsize = MD5_DIGEST_SIZE,
219 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 225 .init = md5_init,
220 .cra_blocksize = MD5_HMAC_BLOCK_SIZE, 226 .update = md5_update,
221 .cra_ctxsize = sizeof(struct md5_ctx), 227 .final = md5_final,
222 .cra_module = THIS_MODULE, 228 .descsize = sizeof(struct md5_ctx),
223 .cra_list = LIST_HEAD_INIT(alg.cra_list), 229 .base = {
224 .cra_u = { .digest = { 230 .cra_name = "md5",
225 .dia_digestsize = MD5_DIGEST_SIZE, 231 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
226 .dia_init = md5_init, 232 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
227 .dia_update = md5_update, 233 .cra_module = THIS_MODULE,
228 .dia_final = md5_final } } 234 }
229}; 235};
230 236
231static int __init md5_mod_init(void) 237static int __init md5_mod_init(void)
232{ 238{
233 return crypto_register_alg(&alg); 239 return crypto_register_shash(&alg);
234} 240}
235 241
236static void __exit md5_mod_fini(void) 242static void __exit md5_mod_fini(void)
237{ 243{
238 crypto_unregister_alg(&alg); 244 crypto_unregister_shash(&alg);
239} 245}
240 246
241module_init(md5_mod_init); 247module_init(md5_mod_init);
diff --git a/crypto/michael_mic.c b/crypto/michael_mic.c
index 9e917b8011b1..079b761bc70d 100644
--- a/crypto/michael_mic.c
+++ b/crypto/michael_mic.c
@@ -9,23 +9,25 @@
9 * it under the terms of the GNU General Public License version 2 as 9 * it under the terms of the GNU General Public License version 2 as
10 * published by the Free Software Foundation. 10 * published by the Free Software Foundation.
11 */ 11 */
12 12#include <crypto/internal/hash.h>
13#include <asm/byteorder.h> 13#include <asm/byteorder.h>
14#include <linux/init.h> 14#include <linux/init.h>
15#include <linux/module.h> 15#include <linux/module.h>
16#include <linux/string.h> 16#include <linux/string.h>
17#include <linux/crypto.h>
18#include <linux/types.h> 17#include <linux/types.h>
19 18
20 19
21struct michael_mic_ctx { 20struct michael_mic_ctx {
21 u32 l, r;
22};
23
24struct michael_mic_desc_ctx {
22 u8 pending[4]; 25 u8 pending[4];
23 size_t pending_len; 26 size_t pending_len;
24 27
25 u32 l, r; 28 u32 l, r;
26}; 29};
27 30
28
29static inline u32 xswap(u32 val) 31static inline u32 xswap(u32 val)
30{ 32{
31 return ((val & 0x00ff00ff) << 8) | ((val & 0xff00ff00) >> 8); 33 return ((val & 0x00ff00ff) << 8) | ((val & 0xff00ff00) >> 8);
@@ -45,17 +47,22 @@ do { \
45} while (0) 47} while (0)
46 48
47 49
48static void michael_init(struct crypto_tfm *tfm) 50static int michael_init(struct shash_desc *desc)
49{ 51{
50 struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm); 52 struct michael_mic_desc_ctx *mctx = shash_desc_ctx(desc);
53 struct michael_mic_ctx *ctx = crypto_shash_ctx(desc->tfm);
51 mctx->pending_len = 0; 54 mctx->pending_len = 0;
55 mctx->l = ctx->l;
56 mctx->r = ctx->r;
57
58 return 0;
52} 59}
53 60
54 61
55static void michael_update(struct crypto_tfm *tfm, const u8 *data, 62static int michael_update(struct shash_desc *desc, const u8 *data,
56 unsigned int len) 63 unsigned int len)
57{ 64{
58 struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm); 65 struct michael_mic_desc_ctx *mctx = shash_desc_ctx(desc);
59 const __le32 *src; 66 const __le32 *src;
60 67
61 if (mctx->pending_len) { 68 if (mctx->pending_len) {
@@ -68,7 +75,7 @@ static void michael_update(struct crypto_tfm *tfm, const u8 *data,
68 len -= flen; 75 len -= flen;
69 76
70 if (mctx->pending_len < 4) 77 if (mctx->pending_len < 4)
71 return; 78 return 0;
72 79
73 src = (const __le32 *)mctx->pending; 80 src = (const __le32 *)mctx->pending;
74 mctx->l ^= le32_to_cpup(src); 81 mctx->l ^= le32_to_cpup(src);
@@ -88,12 +95,14 @@ static void michael_update(struct crypto_tfm *tfm, const u8 *data,
88 mctx->pending_len = len; 95 mctx->pending_len = len;
89 memcpy(mctx->pending, src, len); 96 memcpy(mctx->pending, src, len);
90 } 97 }
98
99 return 0;
91} 100}
92 101
93 102
94static void michael_final(struct crypto_tfm *tfm, u8 *out) 103static int michael_final(struct shash_desc *desc, u8 *out)
95{ 104{
96 struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm); 105 struct michael_mic_desc_ctx *mctx = shash_desc_ctx(desc);
97 u8 *data = mctx->pending; 106 u8 *data = mctx->pending;
98 __le32 *dst = (__le32 *)out; 107 __le32 *dst = (__le32 *)out;
99 108
@@ -119,17 +128,20 @@ static void michael_final(struct crypto_tfm *tfm, u8 *out)
119 128
120 dst[0] = cpu_to_le32(mctx->l); 129 dst[0] = cpu_to_le32(mctx->l);
121 dst[1] = cpu_to_le32(mctx->r); 130 dst[1] = cpu_to_le32(mctx->r);
131
132 return 0;
122} 133}
123 134
124 135
125static int michael_setkey(struct crypto_tfm *tfm, const u8 *key, 136static int michael_setkey(struct crypto_shash *tfm, const u8 *key,
126 unsigned int keylen) 137 unsigned int keylen)
127{ 138{
128 struct michael_mic_ctx *mctx = crypto_tfm_ctx(tfm); 139 struct michael_mic_ctx *mctx = crypto_shash_ctx(tfm);
140
129 const __le32 *data = (const __le32 *)key; 141 const __le32 *data = (const __le32 *)key;
130 142
131 if (keylen != 8) { 143 if (keylen != 8) {
132 tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; 144 crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
133 return -EINVAL; 145 return -EINVAL;
134 } 146 }
135 147
@@ -138,33 +150,31 @@ static int michael_setkey(struct crypto_tfm *tfm, const u8 *key,
138 return 0; 150 return 0;
139} 151}
140 152
141 153static struct shash_alg alg = {
142static struct crypto_alg michael_mic_alg = { 154 .digestsize = 8,
143 .cra_name = "michael_mic", 155 .setkey = michael_setkey,
144 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 156 .init = michael_init,
145 .cra_blocksize = 8, 157 .update = michael_update,
146 .cra_ctxsize = sizeof(struct michael_mic_ctx), 158 .final = michael_final,
147 .cra_module = THIS_MODULE, 159 .descsize = sizeof(struct michael_mic_desc_ctx),
148 .cra_alignmask = 3, 160 .base = {
149 .cra_list = LIST_HEAD_INIT(michael_mic_alg.cra_list), 161 .cra_name = "michael_mic",
150 .cra_u = { .digest = { 162 .cra_blocksize = 8,
151 .dia_digestsize = 8, 163 .cra_alignmask = 3,
152 .dia_init = michael_init, 164 .cra_ctxsize = sizeof(struct michael_mic_ctx),
153 .dia_update = michael_update, 165 .cra_module = THIS_MODULE,
154 .dia_final = michael_final, 166 }
155 .dia_setkey = michael_setkey } }
156}; 167};
157 168
158
159static int __init michael_mic_init(void) 169static int __init michael_mic_init(void)
160{ 170{
161 return crypto_register_alg(&michael_mic_alg); 171 return crypto_register_shash(&alg);
162} 172}
163 173
164 174
165static void __exit michael_mic_exit(void) 175static void __exit michael_mic_exit(void)
166{ 176{
167 crypto_unregister_alg(&michael_mic_alg); 177 crypto_unregister_shash(&alg);
168} 178}
169 179
170 180
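The michael_mic conversion above also splits the state in two: the key halves stay in the tfm context (cra_ctxsize, filled by setkey), while the running l/r values and the pending bytes move into the per-request descriptor (descsize), seeded from the key in michael_init(). A rough sketch of the resulting usage pattern, where one keyed tfm can serve many messages (hypothetical helper; error handling abbreviated):

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/slab.h>

static int michael_mic_example(const u8 key[8],
			       const u8 *hdr, unsigned int hdr_len,
			       const u8 *data, unsigned int data_len,
			       u8 mic[8])
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	int err;

	tfm = crypto_alloc_shash("michael_mic", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* The 8-byte key lands in the tfm context and can be reused. */
	err = crypto_shash_setkey(tfm, key, 8);
	if (err)
		goto out_free_tfm;

	desc = kzalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc) {
		err = -ENOMEM;
		goto out_free_tfm;
	}
	desc->tfm = tfm;

	/* init() copies the key into the descriptor, then data is streamed. */
	err = crypto_shash_init(desc);
	if (!err)
		err = crypto_shash_update(desc, hdr, hdr_len);
	if (!err)
		err = crypto_shash_update(desc, data, data_len);
	if (!err)
		err = crypto_shash_final(desc, mic);

	kfree(desc);
out_free_tfm:
	crypto_free_shash(tfm);
	return err;
}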
diff --git a/crypto/proc.c b/crypto/proc.c
index 37a13d05636d..5dc07e442fca 100644
--- a/crypto/proc.c
+++ b/crypto/proc.c
@@ -94,6 +94,17 @@ static int c_show(struct seq_file *m, void *p)
94 seq_printf(m, "selftest : %s\n", 94 seq_printf(m, "selftest : %s\n",
95 (alg->cra_flags & CRYPTO_ALG_TESTED) ? 95 (alg->cra_flags & CRYPTO_ALG_TESTED) ?
96 "passed" : "unknown"); 96 "passed" : "unknown");
97
98 if (alg->cra_flags & CRYPTO_ALG_LARVAL) {
99 seq_printf(m, "type : larval\n");
100 seq_printf(m, "flags : 0x%x\n", alg->cra_flags);
101 goto out;
102 }
103
104 if (alg->cra_type && alg->cra_type->show) {
105 alg->cra_type->show(m, alg);
106 goto out;
107 }
97 108
98 switch (alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL)) { 109 switch (alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL)) {
99 case CRYPTO_ALG_TYPE_CIPHER: 110 case CRYPTO_ALG_TYPE_CIPHER:
@@ -115,16 +126,11 @@ static int c_show(struct seq_file *m, void *p)
115 seq_printf(m, "type : compression\n"); 126 seq_printf(m, "type : compression\n");
116 break; 127 break;
117 default: 128 default:
118 if (alg->cra_flags & CRYPTO_ALG_LARVAL) { 129 seq_printf(m, "type : unknown\n");
119 seq_printf(m, "type : larval\n");
120 seq_printf(m, "flags : 0x%x\n", alg->cra_flags);
121 } else if (alg->cra_type && alg->cra_type->show)
122 alg->cra_type->show(m, alg);
123 else
124 seq_printf(m, "type : unknown\n");
125 break; 130 break;
126 } 131 }
127 132
133out:
128 seq_putc(m, '\n'); 134 seq_putc(m, '\n');
129 return 0; 135 return 0;
130} 136}
diff --git a/crypto/rmd128.c b/crypto/rmd128.c
index 5de6fa2a76fb..1ceb6735aa53 100644
--- a/crypto/rmd128.c
+++ b/crypto/rmd128.c
@@ -13,11 +13,10 @@
13 * any later version. 13 * any later version.
14 * 14 *
15 */ 15 */
16#include <crypto/internal/hash.h>
16#include <linux/init.h> 17#include <linux/init.h>
17#include <linux/module.h> 18#include <linux/module.h>
18#include <linux/mm.h> 19#include <linux/mm.h>
19#include <linux/crypto.h>
20#include <linux/cryptohash.h>
21#include <linux/types.h> 20#include <linux/types.h>
22#include <asm/byteorder.h> 21#include <asm/byteorder.h>
23 22
@@ -218,9 +217,9 @@ static void rmd128_transform(u32 *state, const __le32 *in)
218 return; 217 return;
219} 218}
220 219
221static void rmd128_init(struct crypto_tfm *tfm) 220static int rmd128_init(struct shash_desc *desc)
222{ 221{
223 struct rmd128_ctx *rctx = crypto_tfm_ctx(tfm); 222 struct rmd128_ctx *rctx = shash_desc_ctx(desc);
224 223
225 rctx->byte_count = 0; 224 rctx->byte_count = 0;
226 225
@@ -230,12 +229,14 @@ static void rmd128_init(struct crypto_tfm *tfm)
230 rctx->state[3] = RMD_H3; 229 rctx->state[3] = RMD_H3;
231 230
232 memset(rctx->buffer, 0, sizeof(rctx->buffer)); 231 memset(rctx->buffer, 0, sizeof(rctx->buffer));
232
233 return 0;
233} 234}
234 235
235static void rmd128_update(struct crypto_tfm *tfm, const u8 *data, 236static int rmd128_update(struct shash_desc *desc, const u8 *data,
236 unsigned int len) 237 unsigned int len)
237{ 238{
238 struct rmd128_ctx *rctx = crypto_tfm_ctx(tfm); 239 struct rmd128_ctx *rctx = shash_desc_ctx(desc);
239 const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f); 240 const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);
240 241
241 rctx->byte_count += len; 242 rctx->byte_count += len;
@@ -244,7 +245,7 @@ static void rmd128_update(struct crypto_tfm *tfm, const u8 *data,
244 if (avail > len) { 245 if (avail > len) {
245 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), 246 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
246 data, len); 247 data, len);
247 return; 248 goto out;
248 } 249 }
249 250
250 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), 251 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
@@ -262,12 +263,15 @@ static void rmd128_update(struct crypto_tfm *tfm, const u8 *data,
262 } 263 }
263 264
264 memcpy(rctx->buffer, data, len); 265 memcpy(rctx->buffer, data, len);
266
267out:
268 return 0;
265} 269}
266 270
267/* Add padding and return the message digest. */ 271/* Add padding and return the message digest. */
268static void rmd128_final(struct crypto_tfm *tfm, u8 *out) 272static int rmd128_final(struct shash_desc *desc, u8 *out)
269{ 273{
270 struct rmd128_ctx *rctx = crypto_tfm_ctx(tfm); 274 struct rmd128_ctx *rctx = shash_desc_ctx(desc);
271 u32 i, index, padlen; 275 u32 i, index, padlen;
272 __le64 bits; 276 __le64 bits;
273 __le32 *dst = (__le32 *)out; 277 __le32 *dst = (__le32 *)out;
@@ -278,10 +282,10 @@ static void rmd128_final(struct crypto_tfm *tfm, u8 *out)
278 /* Pad out to 56 mod 64 */ 282 /* Pad out to 56 mod 64 */
279 index = rctx->byte_count & 0x3f; 283 index = rctx->byte_count & 0x3f;
280 padlen = (index < 56) ? (56 - index) : ((64+56) - index); 284 padlen = (index < 56) ? (56 - index) : ((64+56) - index);
281 rmd128_update(tfm, padding, padlen); 285 rmd128_update(desc, padding, padlen);
282 286
283 /* Append length */ 287 /* Append length */
284 rmd128_update(tfm, (const u8 *)&bits, sizeof(bits)); 288 rmd128_update(desc, (const u8 *)&bits, sizeof(bits));
285 289
286 /* Store state in digest */ 290 /* Store state in digest */
287 for (i = 0; i < 4; i++) 291 for (i = 0; i < 4; i++)
@@ -289,31 +293,32 @@ static void rmd128_final(struct crypto_tfm *tfm, u8 *out)
289 293
290 /* Wipe context */ 294 /* Wipe context */
291 memset(rctx, 0, sizeof(*rctx)); 295 memset(rctx, 0, sizeof(*rctx));
296
297 return 0;
292} 298}
293 299
294static struct crypto_alg alg = { 300static struct shash_alg alg = {
295 .cra_name = "rmd128", 301 .digestsize = RMD128_DIGEST_SIZE,
296 .cra_driver_name = "rmd128", 302 .init = rmd128_init,
297 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 303 .update = rmd128_update,
298 .cra_blocksize = RMD128_BLOCK_SIZE, 304 .final = rmd128_final,
299 .cra_ctxsize = sizeof(struct rmd128_ctx), 305 .descsize = sizeof(struct rmd128_ctx),
300 .cra_module = THIS_MODULE, 306 .base = {
301 .cra_list = LIST_HEAD_INIT(alg.cra_list), 307 .cra_name = "rmd128",
302 .cra_u = { .digest = { 308 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
303 .dia_digestsize = RMD128_DIGEST_SIZE, 309 .cra_blocksize = RMD128_BLOCK_SIZE,
304 .dia_init = rmd128_init, 310 .cra_module = THIS_MODULE,
305 .dia_update = rmd128_update, 311 }
306 .dia_final = rmd128_final } }
307}; 312};
308 313
309static int __init rmd128_mod_init(void) 314static int __init rmd128_mod_init(void)
310{ 315{
311 return crypto_register_alg(&alg); 316 return crypto_register_shash(&alg);
312} 317}
313 318
314static void __exit rmd128_mod_fini(void) 319static void __exit rmd128_mod_fini(void)
315{ 320{
316 crypto_unregister_alg(&alg); 321 crypto_unregister_shash(&alg);
317} 322}
318 323
319module_init(rmd128_mod_init); 324module_init(rmd128_mod_init);
@@ -321,5 +326,3 @@ module_exit(rmd128_mod_fini);
321 326
322MODULE_LICENSE("GPL"); 327MODULE_LICENSE("GPL");
323MODULE_DESCRIPTION("RIPEMD-128 Message Digest"); 328MODULE_DESCRIPTION("RIPEMD-128 Message Digest");
324
325MODULE_ALIAS("rmd128");
diff --git a/crypto/rmd160.c b/crypto/rmd160.c
index f001ec775e1f..472261fc913f 100644
--- a/crypto/rmd160.c
+++ b/crypto/rmd160.c
@@ -13,11 +13,10 @@
13 * any later version. 13 * any later version.
14 * 14 *
15 */ 15 */
16#include <crypto/internal/hash.h>
16#include <linux/init.h> 17#include <linux/init.h>
17#include <linux/module.h> 18#include <linux/module.h>
18#include <linux/mm.h> 19#include <linux/mm.h>
19#include <linux/crypto.h>
20#include <linux/cryptohash.h>
21#include <linux/types.h> 20#include <linux/types.h>
22#include <asm/byteorder.h> 21#include <asm/byteorder.h>
23 22
@@ -261,9 +260,9 @@ static void rmd160_transform(u32 *state, const __le32 *in)
261 return; 260 return;
262} 261}
263 262
264static void rmd160_init(struct crypto_tfm *tfm) 263static int rmd160_init(struct shash_desc *desc)
265{ 264{
266 struct rmd160_ctx *rctx = crypto_tfm_ctx(tfm); 265 struct rmd160_ctx *rctx = shash_desc_ctx(desc);
267 266
268 rctx->byte_count = 0; 267 rctx->byte_count = 0;
269 268
@@ -274,12 +273,14 @@ static void rmd160_init(struct crypto_tfm *tfm)
274 rctx->state[4] = RMD_H4; 273 rctx->state[4] = RMD_H4;
275 274
276 memset(rctx->buffer, 0, sizeof(rctx->buffer)); 275 memset(rctx->buffer, 0, sizeof(rctx->buffer));
276
277 return 0;
277} 278}
278 279
279static void rmd160_update(struct crypto_tfm *tfm, const u8 *data, 280static int rmd160_update(struct shash_desc *desc, const u8 *data,
280 unsigned int len) 281 unsigned int len)
281{ 282{
282 struct rmd160_ctx *rctx = crypto_tfm_ctx(tfm); 283 struct rmd160_ctx *rctx = shash_desc_ctx(desc);
283 const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f); 284 const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);
284 285
285 rctx->byte_count += len; 286 rctx->byte_count += len;
@@ -288,7 +289,7 @@ static void rmd160_update(struct crypto_tfm *tfm, const u8 *data,
288 if (avail > len) { 289 if (avail > len) {
289 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), 290 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
290 data, len); 291 data, len);
291 return; 292 goto out;
292 } 293 }
293 294
294 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), 295 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
@@ -306,12 +307,15 @@ static void rmd160_update(struct crypto_tfm *tfm, const u8 *data,
306 } 307 }
307 308
308 memcpy(rctx->buffer, data, len); 309 memcpy(rctx->buffer, data, len);
310
311out:
312 return 0;
309} 313}
310 314
311/* Add padding and return the message digest. */ 315/* Add padding and return the message digest. */
312static void rmd160_final(struct crypto_tfm *tfm, u8 *out) 316static int rmd160_final(struct shash_desc *desc, u8 *out)
313{ 317{
314 struct rmd160_ctx *rctx = crypto_tfm_ctx(tfm); 318 struct rmd160_ctx *rctx = shash_desc_ctx(desc);
315 u32 i, index, padlen; 319 u32 i, index, padlen;
316 __le64 bits; 320 __le64 bits;
317 __le32 *dst = (__le32 *)out; 321 __le32 *dst = (__le32 *)out;
@@ -322,10 +326,10 @@ static void rmd160_final(struct crypto_tfm *tfm, u8 *out)
322 /* Pad out to 56 mod 64 */ 326 /* Pad out to 56 mod 64 */
323 index = rctx->byte_count & 0x3f; 327 index = rctx->byte_count & 0x3f;
324 padlen = (index < 56) ? (56 - index) : ((64+56) - index); 328 padlen = (index < 56) ? (56 - index) : ((64+56) - index);
325 rmd160_update(tfm, padding, padlen); 329 rmd160_update(desc, padding, padlen);
326 330
327 /* Append length */ 331 /* Append length */
328 rmd160_update(tfm, (const u8 *)&bits, sizeof(bits)); 332 rmd160_update(desc, (const u8 *)&bits, sizeof(bits));
329 333
330 /* Store state in digest */ 334 /* Store state in digest */
331 for (i = 0; i < 5; i++) 335 for (i = 0; i < 5; i++)
@@ -333,31 +337,32 @@ static void rmd160_final(struct crypto_tfm *tfm, u8 *out)
333 337
334 /* Wipe context */ 338 /* Wipe context */
335 memset(rctx, 0, sizeof(*rctx)); 339 memset(rctx, 0, sizeof(*rctx));
340
341 return 0;
336} 342}
337 343
338static struct crypto_alg alg = { 344static struct shash_alg alg = {
339 .cra_name = "rmd160", 345 .digestsize = RMD160_DIGEST_SIZE,
340 .cra_driver_name = "rmd160", 346 .init = rmd160_init,
341 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 347 .update = rmd160_update,
342 .cra_blocksize = RMD160_BLOCK_SIZE, 348 .final = rmd160_final,
343 .cra_ctxsize = sizeof(struct rmd160_ctx), 349 .descsize = sizeof(struct rmd160_ctx),
344 .cra_module = THIS_MODULE, 350 .base = {
345 .cra_list = LIST_HEAD_INIT(alg.cra_list), 351 .cra_name = "rmd160",
346 .cra_u = { .digest = { 352 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
347 .dia_digestsize = RMD160_DIGEST_SIZE, 353 .cra_blocksize = RMD160_BLOCK_SIZE,
348 .dia_init = rmd160_init, 354 .cra_module = THIS_MODULE,
349 .dia_update = rmd160_update, 355 }
350 .dia_final = rmd160_final } }
351}; 356};
352 357
353static int __init rmd160_mod_init(void) 358static int __init rmd160_mod_init(void)
354{ 359{
355 return crypto_register_alg(&alg); 360 return crypto_register_shash(&alg);
356} 361}
357 362
358static void __exit rmd160_mod_fini(void) 363static void __exit rmd160_mod_fini(void)
359{ 364{
360 crypto_unregister_alg(&alg); 365 crypto_unregister_shash(&alg);
361} 366}
362 367
363module_init(rmd160_mod_init); 368module_init(rmd160_mod_init);
@@ -365,5 +370,3 @@ module_exit(rmd160_mod_fini);
365 370
366MODULE_LICENSE("GPL"); 371MODULE_LICENSE("GPL");
367MODULE_DESCRIPTION("RIPEMD-160 Message Digest"); 372MODULE_DESCRIPTION("RIPEMD-160 Message Digest");
368
369MODULE_ALIAS("rmd160");
diff --git a/crypto/rmd256.c b/crypto/rmd256.c
index e3de5b4cb47f..72eafa8d2e7b 100644
--- a/crypto/rmd256.c
+++ b/crypto/rmd256.c
@@ -13,11 +13,10 @@
13 * any later version. 13 * any later version.
14 * 14 *
15 */ 15 */
16#include <crypto/internal/hash.h>
16#include <linux/init.h> 17#include <linux/init.h>
17#include <linux/module.h> 18#include <linux/module.h>
18#include <linux/mm.h> 19#include <linux/mm.h>
19#include <linux/crypto.h>
20#include <linux/cryptohash.h>
21#include <linux/types.h> 20#include <linux/types.h>
22#include <asm/byteorder.h> 21#include <asm/byteorder.h>
23 22
@@ -233,9 +232,9 @@ static void rmd256_transform(u32 *state, const __le32 *in)
233 return; 232 return;
234} 233}
235 234
236static void rmd256_init(struct crypto_tfm *tfm) 235static int rmd256_init(struct shash_desc *desc)
237{ 236{
238 struct rmd256_ctx *rctx = crypto_tfm_ctx(tfm); 237 struct rmd256_ctx *rctx = shash_desc_ctx(desc);
239 238
240 rctx->byte_count = 0; 239 rctx->byte_count = 0;
241 240
@@ -249,12 +248,14 @@ static void rmd256_init(struct crypto_tfm *tfm)
249 rctx->state[7] = RMD_H8; 248 rctx->state[7] = RMD_H8;
250 249
251 memset(rctx->buffer, 0, sizeof(rctx->buffer)); 250 memset(rctx->buffer, 0, sizeof(rctx->buffer));
251
252 return 0;
252} 253}
253 254
254static void rmd256_update(struct crypto_tfm *tfm, const u8 *data, 255static int rmd256_update(struct shash_desc *desc, const u8 *data,
255 unsigned int len) 256 unsigned int len)
256{ 257{
257 struct rmd256_ctx *rctx = crypto_tfm_ctx(tfm); 258 struct rmd256_ctx *rctx = shash_desc_ctx(desc);
258 const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f); 259 const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);
259 260
260 rctx->byte_count += len; 261 rctx->byte_count += len;
@@ -263,7 +264,7 @@ static void rmd256_update(struct crypto_tfm *tfm, const u8 *data,
263 if (avail > len) { 264 if (avail > len) {
264 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), 265 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
265 data, len); 266 data, len);
266 return; 267 goto out;
267 } 268 }
268 269
269 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), 270 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
@@ -281,12 +282,15 @@ static void rmd256_update(struct crypto_tfm *tfm, const u8 *data,
281 } 282 }
282 283
283 memcpy(rctx->buffer, data, len); 284 memcpy(rctx->buffer, data, len);
285
286out:
287 return 0;
284} 288}
285 289
286/* Add padding and return the message digest. */ 290/* Add padding and return the message digest. */
287static void rmd256_final(struct crypto_tfm *tfm, u8 *out) 291static int rmd256_final(struct shash_desc *desc, u8 *out)
288{ 292{
289 struct rmd256_ctx *rctx = crypto_tfm_ctx(tfm); 293 struct rmd256_ctx *rctx = shash_desc_ctx(desc);
290 u32 i, index, padlen; 294 u32 i, index, padlen;
291 __le64 bits; 295 __le64 bits;
292 __le32 *dst = (__le32 *)out; 296 __le32 *dst = (__le32 *)out;
@@ -297,10 +301,10 @@ static void rmd256_final(struct crypto_tfm *tfm, u8 *out)
297 /* Pad out to 56 mod 64 */ 301 /* Pad out to 56 mod 64 */
298 index = rctx->byte_count & 0x3f; 302 index = rctx->byte_count & 0x3f;
299 padlen = (index < 56) ? (56 - index) : ((64+56) - index); 303 padlen = (index < 56) ? (56 - index) : ((64+56) - index);
300 rmd256_update(tfm, padding, padlen); 304 rmd256_update(desc, padding, padlen);
301 305
302 /* Append length */ 306 /* Append length */
303 rmd256_update(tfm, (const u8 *)&bits, sizeof(bits)); 307 rmd256_update(desc, (const u8 *)&bits, sizeof(bits));
304 308
305 /* Store state in digest */ 309 /* Store state in digest */
306 for (i = 0; i < 8; i++) 310 for (i = 0; i < 8; i++)
@@ -308,31 +312,32 @@ static void rmd256_final(struct crypto_tfm *tfm, u8 *out)
308 312
309 /* Wipe context */ 313 /* Wipe context */
310 memset(rctx, 0, sizeof(*rctx)); 314 memset(rctx, 0, sizeof(*rctx));
315
316 return 0;
311} 317}
312 318
313static struct crypto_alg alg = { 319static struct shash_alg alg = {
314 .cra_name = "rmd256", 320 .digestsize = RMD256_DIGEST_SIZE,
315 .cra_driver_name = "rmd256", 321 .init = rmd256_init,
316 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 322 .update = rmd256_update,
317 .cra_blocksize = RMD256_BLOCK_SIZE, 323 .final = rmd256_final,
318 .cra_ctxsize = sizeof(struct rmd256_ctx), 324 .descsize = sizeof(struct rmd256_ctx),
319 .cra_module = THIS_MODULE, 325 .base = {
320 .cra_list = LIST_HEAD_INIT(alg.cra_list), 326 .cra_name = "rmd256",
321 .cra_u = { .digest = { 327 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
322 .dia_digestsize = RMD256_DIGEST_SIZE, 328 .cra_blocksize = RMD256_BLOCK_SIZE,
323 .dia_init = rmd256_init, 329 .cra_module = THIS_MODULE,
324 .dia_update = rmd256_update, 330 }
325 .dia_final = rmd256_final } }
326}; 331};
327 332
328static int __init rmd256_mod_init(void) 333static int __init rmd256_mod_init(void)
329{ 334{
330 return crypto_register_alg(&alg); 335 return crypto_register_shash(&alg);
331} 336}
332 337
333static void __exit rmd256_mod_fini(void) 338static void __exit rmd256_mod_fini(void)
334{ 339{
335 crypto_unregister_alg(&alg); 340 crypto_unregister_shash(&alg);
336} 341}
337 342
338module_init(rmd256_mod_init); 343module_init(rmd256_mod_init);
@@ -340,5 +345,3 @@ module_exit(rmd256_mod_fini);
340 345
341MODULE_LICENSE("GPL"); 346MODULE_LICENSE("GPL");
342MODULE_DESCRIPTION("RIPEMD-256 Message Digest"); 347MODULE_DESCRIPTION("RIPEMD-256 Message Digest");
343
344MODULE_ALIAS("rmd256");
diff --git a/crypto/rmd320.c b/crypto/rmd320.c
index b143d66e42c8..86becaba2f05 100644
--- a/crypto/rmd320.c
+++ b/crypto/rmd320.c
@@ -13,11 +13,10 @@
13 * any later version. 13 * any later version.
14 * 14 *
15 */ 15 */
16#include <crypto/internal/hash.h>
16#include <linux/init.h> 17#include <linux/init.h>
17#include <linux/module.h> 18#include <linux/module.h>
18#include <linux/mm.h> 19#include <linux/mm.h>
19#include <linux/crypto.h>
20#include <linux/cryptohash.h>
21#include <linux/types.h> 20#include <linux/types.h>
22#include <asm/byteorder.h> 21#include <asm/byteorder.h>
23 22
@@ -280,9 +279,9 @@ static void rmd320_transform(u32 *state, const __le32 *in)
280 return; 279 return;
281} 280}
282 281
283static void rmd320_init(struct crypto_tfm *tfm) 282static int rmd320_init(struct shash_desc *desc)
284{ 283{
285 struct rmd320_ctx *rctx = crypto_tfm_ctx(tfm); 284 struct rmd320_ctx *rctx = shash_desc_ctx(desc);
286 285
287 rctx->byte_count = 0; 286 rctx->byte_count = 0;
288 287
@@ -298,12 +297,14 @@ static void rmd320_init(struct crypto_tfm *tfm)
298 rctx->state[9] = RMD_H9; 297 rctx->state[9] = RMD_H9;
299 298
300 memset(rctx->buffer, 0, sizeof(rctx->buffer)); 299 memset(rctx->buffer, 0, sizeof(rctx->buffer));
300
301 return 0;
301} 302}
302 303
303static void rmd320_update(struct crypto_tfm *tfm, const u8 *data, 304static int rmd320_update(struct shash_desc *desc, const u8 *data,
304 unsigned int len) 305 unsigned int len)
305{ 306{
306 struct rmd320_ctx *rctx = crypto_tfm_ctx(tfm); 307 struct rmd320_ctx *rctx = shash_desc_ctx(desc);
307 const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f); 308 const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);
308 309
309 rctx->byte_count += len; 310 rctx->byte_count += len;
@@ -312,7 +313,7 @@ static void rmd320_update(struct crypto_tfm *tfm, const u8 *data,
312 if (avail > len) { 313 if (avail > len) {
313 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), 314 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
314 data, len); 315 data, len);
315 return; 316 goto out;
316 } 317 }
317 318
318 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail), 319 memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
@@ -330,12 +331,15 @@ static void rmd320_update(struct crypto_tfm *tfm, const u8 *data,
330 } 331 }
331 332
332 memcpy(rctx->buffer, data, len); 333 memcpy(rctx->buffer, data, len);
334
335out:
336 return 0;
333} 337}
334 338
335/* Add padding and return the message digest. */ 339/* Add padding and return the message digest. */
336static void rmd320_final(struct crypto_tfm *tfm, u8 *out) 340static int rmd320_final(struct shash_desc *desc, u8 *out)
337{ 341{
338 struct rmd320_ctx *rctx = crypto_tfm_ctx(tfm); 342 struct rmd320_ctx *rctx = shash_desc_ctx(desc);
339 u32 i, index, padlen; 343 u32 i, index, padlen;
340 __le64 bits; 344 __le64 bits;
341 __le32 *dst = (__le32 *)out; 345 __le32 *dst = (__le32 *)out;
@@ -346,10 +350,10 @@ static void rmd320_final(struct crypto_tfm *tfm, u8 *out)
346 /* Pad out to 56 mod 64 */ 350 /* Pad out to 56 mod 64 */
347 index = rctx->byte_count & 0x3f; 351 index = rctx->byte_count & 0x3f;
348 padlen = (index < 56) ? (56 - index) : ((64+56) - index); 352 padlen = (index < 56) ? (56 - index) : ((64+56) - index);
349 rmd320_update(tfm, padding, padlen); 353 rmd320_update(desc, padding, padlen);
350 354
351 /* Append length */ 355 /* Append length */
352 rmd320_update(tfm, (const u8 *)&bits, sizeof(bits)); 356 rmd320_update(desc, (const u8 *)&bits, sizeof(bits));
353 357
354 /* Store state in digest */ 358 /* Store state in digest */
355 for (i = 0; i < 10; i++) 359 for (i = 0; i < 10; i++)
@@ -357,31 +361,32 @@ static void rmd320_final(struct crypto_tfm *tfm, u8 *out)
357 361
358 /* Wipe context */ 362 /* Wipe context */
359 memset(rctx, 0, sizeof(*rctx)); 363 memset(rctx, 0, sizeof(*rctx));
364
365 return 0;
360} 366}
361 367
362static struct crypto_alg alg = { 368static struct shash_alg alg = {
363 .cra_name = "rmd320", 369 .digestsize = RMD320_DIGEST_SIZE,
364 .cra_driver_name = "rmd320", 370 .init = rmd320_init,
365 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 371 .update = rmd320_update,
366 .cra_blocksize = RMD320_BLOCK_SIZE, 372 .final = rmd320_final,
367 .cra_ctxsize = sizeof(struct rmd320_ctx), 373 .descsize = sizeof(struct rmd320_ctx),
368 .cra_module = THIS_MODULE, 374 .base = {
369 .cra_list = LIST_HEAD_INIT(alg.cra_list), 375 .cra_name = "rmd320",
370 .cra_u = { .digest = { 376 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
371 .dia_digestsize = RMD320_DIGEST_SIZE, 377 .cra_blocksize = RMD320_BLOCK_SIZE,
372 .dia_init = rmd320_init, 378 .cra_module = THIS_MODULE,
373 .dia_update = rmd320_update, 379 }
374 .dia_final = rmd320_final } }
375}; 380};
376 381
377static int __init rmd320_mod_init(void) 382static int __init rmd320_mod_init(void)
378{ 383{
379 return crypto_register_alg(&alg); 384 return crypto_register_shash(&alg);
380} 385}
381 386
382static void __exit rmd320_mod_fini(void) 387static void __exit rmd320_mod_fini(void)
383{ 388{
384 crypto_unregister_alg(&alg); 389 crypto_unregister_shash(&alg);
385} 390}
386 391
387module_init(rmd320_mod_init); 392module_init(rmd320_mod_init);
@@ -389,5 +394,3 @@ module_exit(rmd320_mod_fini);
389 394
390MODULE_LICENSE("GPL"); 395MODULE_LICENSE("GPL");
391MODULE_DESCRIPTION("RIPEMD-320 Message Digest"); 396MODULE_DESCRIPTION("RIPEMD-320 Message Digest");
392
393MODULE_ALIAS("rmd320");
diff --git a/crypto/salsa20_generic.c b/crypto/salsa20_generic.c
index b07d55981741..eac10c11685c 100644
--- a/crypto/salsa20_generic.c
+++ b/crypto/salsa20_generic.c
@@ -24,6 +24,7 @@
24#include <linux/errno.h> 24#include <linux/errno.h>
25#include <linux/crypto.h> 25#include <linux/crypto.h>
26#include <linux/types.h> 26#include <linux/types.h>
27#include <linux/bitops.h>
27#include <crypto/algapi.h> 28#include <crypto/algapi.h>
28#include <asm/byteorder.h> 29#include <asm/byteorder.h>
29 30
@@ -42,10 +43,6 @@ D. J. Bernstein
42Public domain. 43Public domain.
43*/ 44*/
44 45
45#define ROTATE(v,n) (((v) << (n)) | ((v) >> (32 - (n))))
46#define XOR(v,w) ((v) ^ (w))
47#define PLUS(v,w) (((v) + (w)))
48#define PLUSONE(v) (PLUS((v),1))
49#define U32TO8_LITTLE(p, v) \ 46#define U32TO8_LITTLE(p, v) \
50 { (p)[0] = (v >> 0) & 0xff; (p)[1] = (v >> 8) & 0xff; \ 47 { (p)[0] = (v >> 0) & 0xff; (p)[1] = (v >> 8) & 0xff; \
51 (p)[2] = (v >> 16) & 0xff; (p)[3] = (v >> 24) & 0xff; } 48 (p)[2] = (v >> 16) & 0xff; (p)[3] = (v >> 24) & 0xff; }
@@ -65,41 +62,41 @@ static void salsa20_wordtobyte(u8 output[64], const u32 input[16])
65 62
66 memcpy(x, input, sizeof(x)); 63 memcpy(x, input, sizeof(x));
67 for (i = 20; i > 0; i -= 2) { 64 for (i = 20; i > 0; i -= 2) {
68 x[ 4] = XOR(x[ 4],ROTATE(PLUS(x[ 0],x[12]), 7)); 65 x[ 4] ^= rol32((x[ 0] + x[12]), 7);
69 x[ 8] = XOR(x[ 8],ROTATE(PLUS(x[ 4],x[ 0]), 9)); 66 x[ 8] ^= rol32((x[ 4] + x[ 0]), 9);
70 x[12] = XOR(x[12],ROTATE(PLUS(x[ 8],x[ 4]),13)); 67 x[12] ^= rol32((x[ 8] + x[ 4]), 13);
71 x[ 0] = XOR(x[ 0],ROTATE(PLUS(x[12],x[ 8]),18)); 68 x[ 0] ^= rol32((x[12] + x[ 8]), 18);
72 x[ 9] = XOR(x[ 9],ROTATE(PLUS(x[ 5],x[ 1]), 7)); 69 x[ 9] ^= rol32((x[ 5] + x[ 1]), 7);
73 x[13] = XOR(x[13],ROTATE(PLUS(x[ 9],x[ 5]), 9)); 70 x[13] ^= rol32((x[ 9] + x[ 5]), 9);
74 x[ 1] = XOR(x[ 1],ROTATE(PLUS(x[13],x[ 9]),13)); 71 x[ 1] ^= rol32((x[13] + x[ 9]), 13);
75 x[ 5] = XOR(x[ 5],ROTATE(PLUS(x[ 1],x[13]),18)); 72 x[ 5] ^= rol32((x[ 1] + x[13]), 18);
76 x[14] = XOR(x[14],ROTATE(PLUS(x[10],x[ 6]), 7)); 73 x[14] ^= rol32((x[10] + x[ 6]), 7);
77 x[ 2] = XOR(x[ 2],ROTATE(PLUS(x[14],x[10]), 9)); 74 x[ 2] ^= rol32((x[14] + x[10]), 9);
78 x[ 6] = XOR(x[ 6],ROTATE(PLUS(x[ 2],x[14]),13)); 75 x[ 6] ^= rol32((x[ 2] + x[14]), 13);
79 x[10] = XOR(x[10],ROTATE(PLUS(x[ 6],x[ 2]),18)); 76 x[10] ^= rol32((x[ 6] + x[ 2]), 18);
80 x[ 3] = XOR(x[ 3],ROTATE(PLUS(x[15],x[11]), 7)); 77 x[ 3] ^= rol32((x[15] + x[11]), 7);
81 x[ 7] = XOR(x[ 7],ROTATE(PLUS(x[ 3],x[15]), 9)); 78 x[ 7] ^= rol32((x[ 3] + x[15]), 9);
82 x[11] = XOR(x[11],ROTATE(PLUS(x[ 7],x[ 3]),13)); 79 x[11] ^= rol32((x[ 7] + x[ 3]), 13);
83 x[15] = XOR(x[15],ROTATE(PLUS(x[11],x[ 7]),18)); 80 x[15] ^= rol32((x[11] + x[ 7]), 18);
84 x[ 1] = XOR(x[ 1],ROTATE(PLUS(x[ 0],x[ 3]), 7)); 81 x[ 1] ^= rol32((x[ 0] + x[ 3]), 7);
85 x[ 2] = XOR(x[ 2],ROTATE(PLUS(x[ 1],x[ 0]), 9)); 82 x[ 2] ^= rol32((x[ 1] + x[ 0]), 9);
86 x[ 3] = XOR(x[ 3],ROTATE(PLUS(x[ 2],x[ 1]),13)); 83 x[ 3] ^= rol32((x[ 2] + x[ 1]), 13);
87 x[ 0] = XOR(x[ 0],ROTATE(PLUS(x[ 3],x[ 2]),18)); 84 x[ 0] ^= rol32((x[ 3] + x[ 2]), 18);
88 x[ 6] = XOR(x[ 6],ROTATE(PLUS(x[ 5],x[ 4]), 7)); 85 x[ 6] ^= rol32((x[ 5] + x[ 4]), 7);
89 x[ 7] = XOR(x[ 7],ROTATE(PLUS(x[ 6],x[ 5]), 9)); 86 x[ 7] ^= rol32((x[ 6] + x[ 5]), 9);
90 x[ 4] = XOR(x[ 4],ROTATE(PLUS(x[ 7],x[ 6]),13)); 87 x[ 4] ^= rol32((x[ 7] + x[ 6]), 13);
91 x[ 5] = XOR(x[ 5],ROTATE(PLUS(x[ 4],x[ 7]),18)); 88 x[ 5] ^= rol32((x[ 4] + x[ 7]), 18);
92 x[11] = XOR(x[11],ROTATE(PLUS(x[10],x[ 9]), 7)); 89 x[11] ^= rol32((x[10] + x[ 9]), 7);
93 x[ 8] = XOR(x[ 8],ROTATE(PLUS(x[11],x[10]), 9)); 90 x[ 8] ^= rol32((x[11] + x[10]), 9);
94 x[ 9] = XOR(x[ 9],ROTATE(PLUS(x[ 8],x[11]),13)); 91 x[ 9] ^= rol32((x[ 8] + x[11]), 13);
95 x[10] = XOR(x[10],ROTATE(PLUS(x[ 9],x[ 8]),18)); 92 x[10] ^= rol32((x[ 9] + x[ 8]), 18);
96 x[12] = XOR(x[12],ROTATE(PLUS(x[15],x[14]), 7)); 93 x[12] ^= rol32((x[15] + x[14]), 7);
97 x[13] = XOR(x[13],ROTATE(PLUS(x[12],x[15]), 9)); 94 x[13] ^= rol32((x[12] + x[15]), 9);
98 x[14] = XOR(x[14],ROTATE(PLUS(x[13],x[12]),13)); 95 x[14] ^= rol32((x[13] + x[12]), 13);
99 x[15] = XOR(x[15],ROTATE(PLUS(x[14],x[13]),18)); 96 x[15] ^= rol32((x[14] + x[13]), 18);
100 } 97 }
101 for (i = 0; i < 16; ++i) 98 for (i = 0; i < 16; ++i)
102 x[i] = PLUS(x[i],input[i]); 99 x[i] += input[i];
103 for (i = 0; i < 16; ++i) 100 for (i = 0; i < 16; ++i)
104 U32TO8_LITTLE(output + 4 * i,x[i]); 101 U32TO8_LITTLE(output + 4 * i,x[i]);
105} 102}
@@ -150,9 +147,9 @@ static void salsa20_encrypt_bytes(struct salsa20_ctx *ctx, u8 *dst,
150 while (bytes) { 147 while (bytes) {
151 salsa20_wordtobyte(buf, ctx->input); 148 salsa20_wordtobyte(buf, ctx->input);
152 149
153 ctx->input[8] = PLUSONE(ctx->input[8]); 150 ctx->input[8]++;
154 if (!ctx->input[8]) 151 if (!ctx->input[8])
155 ctx->input[9] = PLUSONE(ctx->input[9]); 152 ctx->input[9]++;
156 153
157 if (bytes <= 64) { 154 if (bytes <= 64) {
158 crypto_xor(dst, buf, bytes); 155 crypto_xor(dst, buf, bytes);
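The salsa20 hunk above is a pure cleanup: the private ROTATE/XOR/PLUS/PLUSONE wrappers are dropped in favour of rol32() from <linux/bitops.h> and plain C operators. The rewrite is mechanical, as the hypothetical helper below illustrates for one quarter-round step:

#include <linux/bitops.h>
#include <linux/types.h>

/*
 * rol32(v, n) is exactly the left-rotate the removed ROTATE() macro
 * open-coded, ((v << n) | (v >> (32 - n))), so
 *
 *     x[4] = XOR(x[4], ROTATE(PLUS(x[0], x[12]), 7));
 *
 * becomes
 *
 *     x[4] ^= rol32(x[0] + x[12], 7);
 */
static inline u32 salsa20_qr_step(u32 a, u32 b, u32 c)
{
	return a ^ rol32(b + c, 7);
}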
diff --git a/crypto/sha1_generic.c b/crypto/sha1_generic.c
index c7c6899e1fca..9efef20454cb 100644
--- a/crypto/sha1_generic.c
+++ b/crypto/sha1_generic.c
@@ -16,10 +16,10 @@
16 * any later version. 16 * any later version.
17 * 17 *
18 */ 18 */
19#include <crypto/internal/hash.h>
19#include <linux/init.h> 20#include <linux/init.h>
20#include <linux/module.h> 21#include <linux/module.h>
21#include <linux/mm.h> 22#include <linux/mm.h>
22#include <linux/crypto.h>
23#include <linux/cryptohash.h> 23#include <linux/cryptohash.h>
24#include <linux/types.h> 24#include <linux/types.h>
25#include <crypto/sha.h> 25#include <crypto/sha.h>
@@ -31,9 +31,10 @@ struct sha1_ctx {
31 u8 buffer[64]; 31 u8 buffer[64];
32}; 32};
33 33
34static void sha1_init(struct crypto_tfm *tfm) 34static int sha1_init(struct shash_desc *desc)
35{ 35{
36 struct sha1_ctx *sctx = crypto_tfm_ctx(tfm); 36 struct sha1_ctx *sctx = shash_desc_ctx(desc);
37
37 static const struct sha1_ctx initstate = { 38 static const struct sha1_ctx initstate = {
38 0, 39 0,
39 { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 }, 40 { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 },
@@ -41,12 +42,14 @@ static void sha1_init(struct crypto_tfm *tfm)
41 }; 42 };
42 43
43 *sctx = initstate; 44 *sctx = initstate;
45
46 return 0;
44} 47}
45 48
46static void sha1_update(struct crypto_tfm *tfm, const u8 *data, 49static int sha1_update(struct shash_desc *desc, const u8 *data,
47 unsigned int len) 50 unsigned int len)
48{ 51{
49 struct sha1_ctx *sctx = crypto_tfm_ctx(tfm); 52 struct sha1_ctx *sctx = shash_desc_ctx(desc);
50 unsigned int partial, done; 53 unsigned int partial, done;
51 const u8 *src; 54 const u8 *src;
52 55
@@ -74,13 +77,15 @@ static void sha1_update(struct crypto_tfm *tfm, const u8 *data,
74 partial = 0; 77 partial = 0;
75 } 78 }
76 memcpy(sctx->buffer + partial, src, len - done); 79 memcpy(sctx->buffer + partial, src, len - done);
80
81 return 0;
77} 82}
78 83
79 84
80/* Add padding and return the message digest. */ 85/* Add padding and return the message digest. */
81static void sha1_final(struct crypto_tfm *tfm, u8 *out) 86static int sha1_final(struct shash_desc *desc, u8 *out)
82{ 87{
83 struct sha1_ctx *sctx = crypto_tfm_ctx(tfm); 88 struct sha1_ctx *sctx = shash_desc_ctx(desc);
84 __be32 *dst = (__be32 *)out; 89 __be32 *dst = (__be32 *)out;
85 u32 i, index, padlen; 90 u32 i, index, padlen;
86 __be64 bits; 91 __be64 bits;
@@ -91,10 +96,10 @@ static void sha1_final(struct crypto_tfm *tfm, u8 *out)
91 /* Pad out to 56 mod 64 */ 96 /* Pad out to 56 mod 64 */
92 index = sctx->count & 0x3f; 97 index = sctx->count & 0x3f;
93 padlen = (index < 56) ? (56 - index) : ((64+56) - index); 98 padlen = (index < 56) ? (56 - index) : ((64+56) - index);
94 sha1_update(tfm, padding, padlen); 99 sha1_update(desc, padding, padlen);
95 100
96 /* Append length */ 101 /* Append length */
97 sha1_update(tfm, (const u8 *)&bits, sizeof(bits)); 102 sha1_update(desc, (const u8 *)&bits, sizeof(bits));
98 103
99 /* Store state in digest */ 104 /* Store state in digest */
100 for (i = 0; i < 5; i++) 105 for (i = 0; i < 5; i++)
@@ -102,32 +107,33 @@ static void sha1_final(struct crypto_tfm *tfm, u8 *out)
102 107
103 /* Wipe context */ 108 /* Wipe context */
104 memset(sctx, 0, sizeof *sctx); 109 memset(sctx, 0, sizeof *sctx);
110
111 return 0;
105} 112}
106 113
107static struct crypto_alg alg = { 114static struct shash_alg alg = {
108 .cra_name = "sha1", 115 .digestsize = SHA1_DIGEST_SIZE,
109 .cra_driver_name= "sha1-generic", 116 .init = sha1_init,
110 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 117 .update = sha1_update,
111 .cra_blocksize = SHA1_BLOCK_SIZE, 118 .final = sha1_final,
112 .cra_ctxsize = sizeof(struct sha1_ctx), 119 .descsize = sizeof(struct sha1_ctx),
113 .cra_module = THIS_MODULE, 120 .base = {
114 .cra_alignmask = 3, 121 .cra_name = "sha1",
115 .cra_list = LIST_HEAD_INIT(alg.cra_list), 122 .cra_driver_name= "sha1-generic",
116 .cra_u = { .digest = { 123 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
117 .dia_digestsize = SHA1_DIGEST_SIZE, 124 .cra_blocksize = SHA1_BLOCK_SIZE,
118 .dia_init = sha1_init, 125 .cra_module = THIS_MODULE,
119 .dia_update = sha1_update, 126 }
120 .dia_final = sha1_final } }
121}; 127};
122 128
123static int __init sha1_generic_mod_init(void) 129static int __init sha1_generic_mod_init(void)
124{ 130{
125 return crypto_register_alg(&alg); 131 return crypto_register_shash(&alg);
126} 132}
127 133
128static void __exit sha1_generic_mod_fini(void) 134static void __exit sha1_generic_mod_fini(void)
129{ 135{
130 crypto_unregister_alg(&alg); 136 crypto_unregister_shash(&alg);
131} 137}
132 138
133module_init(sha1_generic_mod_init); 139module_init(sha1_generic_mod_init);
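[Editorial note, not part of the patch set: for orientation, a minimal consumer-side sketch of the synchronous hash interface that the conversion above targets. The function name sha1_digest_example and the error handling are illustrative; the on-stack descriptor layout mirrors the one used later in testmgr's crc32c test.]

#include <crypto/hash.h>
#include <crypto/sha.h>
#include <linux/err.h>

static int sha1_digest_example(const u8 *data, unsigned int len,
                               u8 out[SHA1_DIGEST_SIZE])
{
        struct crypto_shash *tfm;
        int err;

        tfm = crypto_alloc_shash("sha1", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        {
                /* crypto_shash_descsize() bytes of per-request state
                 * follow the struct shash_desc header. */
                struct {
                        struct shash_desc shash;
                        char ctx[crypto_shash_descsize(tfm)];
                } sdesc;

                sdesc.shash.tfm = tfm;
                sdesc.shash.flags = 0;

                /* One-shot helper; falls back to init + update + final
                 * when the algorithm supplies no digest() of its own. */
                err = crypto_shash_digest(&sdesc.shash, data, len, out);
        }

        crypto_free_shash(tfm);
        return err;
}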
diff --git a/crypto/sha256_generic.c b/crypto/sha256_generic.c
index 5a8dd47558e5..caa3542e6ce8 100644
--- a/crypto/sha256_generic.c
+++ b/crypto/sha256_generic.c
@@ -17,10 +17,10 @@
17 * any later version. 17 * any later version.
18 * 18 *
19 */ 19 */
20#include <crypto/internal/hash.h>
20#include <linux/init.h> 21#include <linux/init.h>
21#include <linux/module.h> 22#include <linux/module.h>
22#include <linux/mm.h> 23#include <linux/mm.h>
23#include <linux/crypto.h>
24#include <linux/types.h> 24#include <linux/types.h>
25#include <crypto/sha.h> 25#include <crypto/sha.h>
26#include <asm/byteorder.h> 26#include <asm/byteorder.h>
@@ -69,7 +69,7 @@ static void sha256_transform(u32 *state, const u8 *input)
69 /* now blend */ 69 /* now blend */
70 for (i = 16; i < 64; i++) 70 for (i = 16; i < 64; i++)
71 BLEND_OP(i, W); 71 BLEND_OP(i, W);
72 72
73 /* load the state into our registers */ 73 /* load the state into our registers */
74 a=state[0]; b=state[1]; c=state[2]; d=state[3]; 74 a=state[0]; b=state[1]; c=state[2]; d=state[3];
75 e=state[4]; f=state[5]; g=state[6]; h=state[7]; 75 e=state[4]; f=state[5]; g=state[6]; h=state[7];
@@ -220,9 +220,9 @@ static void sha256_transform(u32 *state, const u8 *input)
220} 220}
221 221
222 222
223static void sha224_init(struct crypto_tfm *tfm) 223static int sha224_init(struct shash_desc *desc)
224{ 224{
225 struct sha256_ctx *sctx = crypto_tfm_ctx(tfm); 225 struct sha256_ctx *sctx = shash_desc_ctx(desc);
226 sctx->state[0] = SHA224_H0; 226 sctx->state[0] = SHA224_H0;
227 sctx->state[1] = SHA224_H1; 227 sctx->state[1] = SHA224_H1;
228 sctx->state[2] = SHA224_H2; 228 sctx->state[2] = SHA224_H2;
@@ -233,11 +233,13 @@ static void sha224_init(struct crypto_tfm *tfm)
233 sctx->state[7] = SHA224_H7; 233 sctx->state[7] = SHA224_H7;
234 sctx->count[0] = 0; 234 sctx->count[0] = 0;
235 sctx->count[1] = 0; 235 sctx->count[1] = 0;
236
237 return 0;
236} 238}
237 239
238static void sha256_init(struct crypto_tfm *tfm) 240static int sha256_init(struct shash_desc *desc)
239{ 241{
240 struct sha256_ctx *sctx = crypto_tfm_ctx(tfm); 242 struct sha256_ctx *sctx = shash_desc_ctx(desc);
241 sctx->state[0] = SHA256_H0; 243 sctx->state[0] = SHA256_H0;
242 sctx->state[1] = SHA256_H1; 244 sctx->state[1] = SHA256_H1;
243 sctx->state[2] = SHA256_H2; 245 sctx->state[2] = SHA256_H2;
@@ -247,12 +249,14 @@ static void sha256_init(struct crypto_tfm *tfm)
247 sctx->state[6] = SHA256_H6; 249 sctx->state[6] = SHA256_H6;
248 sctx->state[7] = SHA256_H7; 250 sctx->state[7] = SHA256_H7;
249 sctx->count[0] = sctx->count[1] = 0; 251 sctx->count[0] = sctx->count[1] = 0;
252
253 return 0;
250} 254}
251 255
252static void sha256_update(struct crypto_tfm *tfm, const u8 *data, 256static int sha256_update(struct shash_desc *desc, const u8 *data,
253 unsigned int len) 257 unsigned int len)
254{ 258{
255 struct sha256_ctx *sctx = crypto_tfm_ctx(tfm); 259 struct sha256_ctx *sctx = shash_desc_ctx(desc);
256 unsigned int i, index, part_len; 260 unsigned int i, index, part_len;
257 261
258 /* Compute number of bytes mod 128 */ 262 /* Compute number of bytes mod 128 */
@@ -277,14 +281,16 @@ static void sha256_update(struct crypto_tfm *tfm, const u8 *data,
277 } else { 281 } else {
278 i = 0; 282 i = 0;
279 } 283 }
280 284
281 /* Buffer remaining input */ 285 /* Buffer remaining input */
282 memcpy(&sctx->buf[index], &data[i], len-i); 286 memcpy(&sctx->buf[index], &data[i], len-i);
287
288 return 0;
283} 289}
284 290
285static void sha256_final(struct crypto_tfm *tfm, u8 *out) 291static int sha256_final(struct shash_desc *desc, u8 *out)
286{ 292{
287 struct sha256_ctx *sctx = crypto_tfm_ctx(tfm); 293 struct sha256_ctx *sctx = shash_desc_ctx(desc);
288 __be32 *dst = (__be32 *)out; 294 __be32 *dst = (__be32 *)out;
289 __be32 bits[2]; 295 __be32 bits[2];
290 unsigned int index, pad_len; 296 unsigned int index, pad_len;
@@ -298,10 +304,10 @@ static void sha256_final(struct crypto_tfm *tfm, u8 *out)
298 /* Pad out to 56 mod 64. */ 304 /* Pad out to 56 mod 64. */
299 index = (sctx->count[0] >> 3) & 0x3f; 305 index = (sctx->count[0] >> 3) & 0x3f;
300 pad_len = (index < 56) ? (56 - index) : ((64+56) - index); 306 pad_len = (index < 56) ? (56 - index) : ((64+56) - index);
301 sha256_update(tfm, padding, pad_len); 307 sha256_update(desc, padding, pad_len);
302 308
303 /* Append length (before padding) */ 309 /* Append length (before padding) */
304 sha256_update(tfm, (const u8 *)bits, sizeof(bits)); 310 sha256_update(desc, (const u8 *)bits, sizeof(bits));
305 311
306 /* Store state in digest */ 312 /* Store state in digest */
307 for (i = 0; i < 8; i++) 313 for (i = 0; i < 8; i++)
@@ -309,71 +315,73 @@ static void sha256_final(struct crypto_tfm *tfm, u8 *out)
309 315
310 /* Zeroize sensitive information. */ 316 /* Zeroize sensitive information. */
311 memset(sctx, 0, sizeof(*sctx)); 317 memset(sctx, 0, sizeof(*sctx));
318
319 return 0;
312} 320}
313 321
314static void sha224_final(struct crypto_tfm *tfm, u8 *hash) 322static int sha224_final(struct shash_desc *desc, u8 *hash)
315{ 323{
316 u8 D[SHA256_DIGEST_SIZE]; 324 u8 D[SHA256_DIGEST_SIZE];
317 325
318 sha256_final(tfm, D); 326 sha256_final(desc, D);
319 327
320 memcpy(hash, D, SHA224_DIGEST_SIZE); 328 memcpy(hash, D, SHA224_DIGEST_SIZE);
321 memset(D, 0, SHA256_DIGEST_SIZE); 329 memset(D, 0, SHA256_DIGEST_SIZE);
330
331 return 0;
322} 332}
323 333
324static struct crypto_alg sha256 = { 334static struct shash_alg sha256 = {
325 .cra_name = "sha256", 335 .digestsize = SHA256_DIGEST_SIZE,
326 .cra_driver_name= "sha256-generic", 336 .init = sha256_init,
327 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 337 .update = sha256_update,
328 .cra_blocksize = SHA256_BLOCK_SIZE, 338 .final = sha256_final,
329 .cra_ctxsize = sizeof(struct sha256_ctx), 339 .descsize = sizeof(struct sha256_ctx),
330 .cra_module = THIS_MODULE, 340 .base = {
331 .cra_alignmask = 3, 341 .cra_name = "sha256",
332 .cra_list = LIST_HEAD_INIT(sha256.cra_list), 342 .cra_driver_name= "sha256-generic",
333 .cra_u = { .digest = { 343 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
334 .dia_digestsize = SHA256_DIGEST_SIZE, 344 .cra_blocksize = SHA256_BLOCK_SIZE,
335 .dia_init = sha256_init, 345 .cra_module = THIS_MODULE,
336 .dia_update = sha256_update, 346 }
337 .dia_final = sha256_final } }
338}; 347};
339 348
340static struct crypto_alg sha224 = { 349static struct shash_alg sha224 = {
341 .cra_name = "sha224", 350 .digestsize = SHA224_DIGEST_SIZE,
342 .cra_driver_name = "sha224-generic", 351 .init = sha224_init,
343 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 352 .update = sha256_update,
344 .cra_blocksize = SHA224_BLOCK_SIZE, 353 .final = sha224_final,
345 .cra_ctxsize = sizeof(struct sha256_ctx), 354 .descsize = sizeof(struct sha256_ctx),
346 .cra_module = THIS_MODULE, 355 .base = {
347 .cra_alignmask = 3, 356 .cra_name = "sha224",
348 .cra_list = LIST_HEAD_INIT(sha224.cra_list), 357 .cra_driver_name= "sha224-generic",
349 .cra_u = { .digest = { 358 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
350 .dia_digestsize = SHA224_DIGEST_SIZE, 359 .cra_blocksize = SHA224_BLOCK_SIZE,
351 .dia_init = sha224_init, 360 .cra_module = THIS_MODULE,
352 .dia_update = sha256_update, 361 }
353 .dia_final = sha224_final } }
354}; 362};
355 363
356static int __init sha256_generic_mod_init(void) 364static int __init sha256_generic_mod_init(void)
357{ 365{
358 int ret = 0; 366 int ret = 0;
359 367
360 ret = crypto_register_alg(&sha224); 368 ret = crypto_register_shash(&sha224);
361 369
362 if (ret < 0) 370 if (ret < 0)
363 return ret; 371 return ret;
364 372
365 ret = crypto_register_alg(&sha256); 373 ret = crypto_register_shash(&sha256);
366 374
367 if (ret < 0) 375 if (ret < 0)
368 crypto_unregister_alg(&sha224); 376 crypto_unregister_shash(&sha224);
369 377
370 return ret; 378 return ret;
371} 379}
372 380
373static void __exit sha256_generic_mod_fini(void) 381static void __exit sha256_generic_mod_fini(void)
374{ 382{
375 crypto_unregister_alg(&sha224); 383 crypto_unregister_shash(&sha224);
376 crypto_unregister_alg(&sha256); 384 crypto_unregister_shash(&sha256);
377} 385}
378 386
379module_init(sha256_generic_mod_init); 387module_init(sha256_generic_mod_init);
diff --git a/crypto/sha512_generic.c b/crypto/sha512_generic.c
index bc3686138aeb..3bea38d12242 100644
--- a/crypto/sha512_generic.c
+++ b/crypto/sha512_generic.c
@@ -10,7 +10,7 @@
10 * later version. 10 * later version.
11 * 11 *
12 */ 12 */
13 13#include <crypto/internal/hash.h>
14#include <linux/kernel.h> 14#include <linux/kernel.h>
15#include <linux/module.h> 15#include <linux/module.h>
16#include <linux/mm.h> 16#include <linux/mm.h>
@@ -18,16 +18,17 @@
18#include <linux/crypto.h> 18#include <linux/crypto.h>
19#include <linux/types.h> 19#include <linux/types.h>
20#include <crypto/sha.h> 20#include <crypto/sha.h>
21 21#include <linux/percpu.h>
22#include <asm/byteorder.h> 22#include <asm/byteorder.h>
23 23
24struct sha512_ctx { 24struct sha512_ctx {
25 u64 state[8]; 25 u64 state[8];
26 u32 count[4]; 26 u32 count[4];
27 u8 buf[128]; 27 u8 buf[128];
28 u64 W[80];
29}; 28};
30 29
30static DEFINE_PER_CPU(u64[80], msg_schedule);
31
31static inline u64 Ch(u64 x, u64 y, u64 z) 32static inline u64 Ch(u64 x, u64 y, u64 z)
32{ 33{
33 return z ^ (x & (y ^ z)); 34 return z ^ (x & (y ^ z));
@@ -89,11 +90,12 @@ static inline void BLEND_OP(int I, u64 *W)
89} 90}
90 91
91static void 92static void
92sha512_transform(u64 *state, u64 *W, const u8 *input) 93sha512_transform(u64 *state, const u8 *input)
93{ 94{
94 u64 a, b, c, d, e, f, g, h, t1, t2; 95 u64 a, b, c, d, e, f, g, h, t1, t2;
95 96
96 int i; 97 int i;
98 u64 *W = get_cpu_var(msg_schedule);
97 99
98 /* load the input */ 100 /* load the input */
99 for (i = 0; i < 16; i++) 101 for (i = 0; i < 16; i++)
@@ -132,12 +134,14 @@ sha512_transform(u64 *state, u64 *W, const u8 *input)
132 134
133 /* erase our data */ 135 /* erase our data */
134 a = b = c = d = e = f = g = h = t1 = t2 = 0; 136 a = b = c = d = e = f = g = h = t1 = t2 = 0;
137 memset(W, 0, sizeof(__get_cpu_var(msg_schedule)));
138 put_cpu_var(msg_schedule);
135} 139}
136 140
137static void 141static int
138sha512_init(struct crypto_tfm *tfm) 142sha512_init(struct shash_desc *desc)
139{ 143{
140 struct sha512_ctx *sctx = crypto_tfm_ctx(tfm); 144 struct sha512_ctx *sctx = shash_desc_ctx(desc);
141 sctx->state[0] = SHA512_H0; 145 sctx->state[0] = SHA512_H0;
142 sctx->state[1] = SHA512_H1; 146 sctx->state[1] = SHA512_H1;
143 sctx->state[2] = SHA512_H2; 147 sctx->state[2] = SHA512_H2;
@@ -147,12 +151,14 @@ sha512_init(struct crypto_tfm *tfm)
147 sctx->state[6] = SHA512_H6; 151 sctx->state[6] = SHA512_H6;
148 sctx->state[7] = SHA512_H7; 152 sctx->state[7] = SHA512_H7;
149 sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0; 153 sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0;
154
155 return 0;
150} 156}
151 157
152static void 158static int
153sha384_init(struct crypto_tfm *tfm) 159sha384_init(struct shash_desc *desc)
154{ 160{
155 struct sha512_ctx *sctx = crypto_tfm_ctx(tfm); 161 struct sha512_ctx *sctx = shash_desc_ctx(desc);
156 sctx->state[0] = SHA384_H0; 162 sctx->state[0] = SHA384_H0;
157 sctx->state[1] = SHA384_H1; 163 sctx->state[1] = SHA384_H1;
158 sctx->state[2] = SHA384_H2; 164 sctx->state[2] = SHA384_H2;
@@ -162,12 +168,14 @@ sha384_init(struct crypto_tfm *tfm)
162 sctx->state[6] = SHA384_H6; 168 sctx->state[6] = SHA384_H6;
163 sctx->state[7] = SHA384_H7; 169 sctx->state[7] = SHA384_H7;
164 sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0; 170 sctx->count[0] = sctx->count[1] = sctx->count[2] = sctx->count[3] = 0;
171
172 return 0;
165} 173}
166 174
167static void 175static int
168sha512_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len) 176sha512_update(struct shash_desc *desc, const u8 *data, unsigned int len)
169{ 177{
170 struct sha512_ctx *sctx = crypto_tfm_ctx(tfm); 178 struct sha512_ctx *sctx = shash_desc_ctx(desc);
171 179
172 unsigned int i, index, part_len; 180 unsigned int i, index, part_len;
173 181
@@ -187,10 +195,10 @@ sha512_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
187 /* Transform as many times as possible. */ 195 /* Transform as many times as possible. */
188 if (len >= part_len) { 196 if (len >= part_len) {
189 memcpy(&sctx->buf[index], data, part_len); 197 memcpy(&sctx->buf[index], data, part_len);
190 sha512_transform(sctx->state, sctx->W, sctx->buf); 198 sha512_transform(sctx->state, sctx->buf);
191 199
192 for (i = part_len; i + 127 < len; i+=128) 200 for (i = part_len; i + 127 < len; i+=128)
193 sha512_transform(sctx->state, sctx->W, &data[i]); 201 sha512_transform(sctx->state, &data[i]);
194 202
195 index = 0; 203 index = 0;
196 } else { 204 } else {
@@ -200,14 +208,13 @@ sha512_update(struct crypto_tfm *tfm, const u8 *data, unsigned int len)
200 /* Buffer remaining input */ 208 /* Buffer remaining input */
201 memcpy(&sctx->buf[index], &data[i], len - i); 209 memcpy(&sctx->buf[index], &data[i], len - i);
202 210
203 /* erase our data */ 211 return 0;
204 memset(sctx->W, 0, sizeof(sctx->W));
205} 212}
206 213
207static void 214static int
208sha512_final(struct crypto_tfm *tfm, u8 *hash) 215sha512_final(struct shash_desc *desc, u8 *hash)
209{ 216{
210 struct sha512_ctx *sctx = crypto_tfm_ctx(tfm); 217 struct sha512_ctx *sctx = shash_desc_ctx(desc);
211 static u8 padding[128] = { 0x80, }; 218 static u8 padding[128] = { 0x80, };
212 __be64 *dst = (__be64 *)hash; 219 __be64 *dst = (__be64 *)hash;
213 __be32 bits[4]; 220 __be32 bits[4];
@@ -223,10 +230,10 @@ sha512_final(struct crypto_tfm *tfm, u8 *hash)
223 /* Pad out to 112 mod 128. */ 230 /* Pad out to 112 mod 128. */
224 index = (sctx->count[0] >> 3) & 0x7f; 231 index = (sctx->count[0] >> 3) & 0x7f;
225 pad_len = (index < 112) ? (112 - index) : ((128+112) - index); 232 pad_len = (index < 112) ? (112 - index) : ((128+112) - index);
226 sha512_update(tfm, padding, pad_len); 233 sha512_update(desc, padding, pad_len);
227 234
228 /* Append length (before padding) */ 235 /* Append length (before padding) */
229 sha512_update(tfm, (const u8 *)bits, sizeof(bits)); 236 sha512_update(desc, (const u8 *)bits, sizeof(bits));
230 237
231 /* Store state in digest */ 238 /* Store state in digest */
232 for (i = 0; i < 8; i++) 239 for (i = 0; i < 8; i++)
@@ -234,66 +241,66 @@ sha512_final(struct crypto_tfm *tfm, u8 *hash)
234 241
235 /* Zeroize sensitive information. */ 242 /* Zeroize sensitive information. */
236 memset(sctx, 0, sizeof(struct sha512_ctx)); 243 memset(sctx, 0, sizeof(struct sha512_ctx));
244
245 return 0;
237} 246}
238 247
239static void sha384_final(struct crypto_tfm *tfm, u8 *hash) 248static int sha384_final(struct shash_desc *desc, u8 *hash)
240{ 249{
241 u8 D[64]; 250 u8 D[64];
251
252 sha512_final(desc, D);
242 253
243 sha512_final(tfm, D); 254 memcpy(hash, D, 48);
255 memset(D, 0, 64);
244 256
245 memcpy(hash, D, 48); 257 return 0;
246 memset(D, 0, 64);
247} 258}
248 259
249static struct crypto_alg sha512 = { 260static struct shash_alg sha512 = {
250 .cra_name = "sha512", 261 .digestsize = SHA512_DIGEST_SIZE,
251 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 262 .init = sha512_init,
252 .cra_blocksize = SHA512_BLOCK_SIZE, 263 .update = sha512_update,
253 .cra_ctxsize = sizeof(struct sha512_ctx), 264 .final = sha512_final,
254 .cra_module = THIS_MODULE, 265 .descsize = sizeof(struct sha512_ctx),
255 .cra_alignmask = 3, 266 .base = {
256 .cra_list = LIST_HEAD_INIT(sha512.cra_list), 267 .cra_name = "sha512",
257 .cra_u = { .digest = { 268 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
258 .dia_digestsize = SHA512_DIGEST_SIZE, 269 .cra_blocksize = SHA512_BLOCK_SIZE,
259 .dia_init = sha512_init, 270 .cra_module = THIS_MODULE,
260 .dia_update = sha512_update, 271 }
261 .dia_final = sha512_final }
262 }
263}; 272};
264 273
265static struct crypto_alg sha384 = { 274static struct shash_alg sha384 = {
266 .cra_name = "sha384", 275 .digestsize = SHA384_DIGEST_SIZE,
267 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 276 .init = sha384_init,
268 .cra_blocksize = SHA384_BLOCK_SIZE, 277 .update = sha512_update,
269 .cra_ctxsize = sizeof(struct sha512_ctx), 278 .final = sha384_final,
270 .cra_alignmask = 3, 279 .descsize = sizeof(struct sha512_ctx),
271 .cra_module = THIS_MODULE, 280 .base = {
272 .cra_list = LIST_HEAD_INIT(sha384.cra_list), 281 .cra_name = "sha384",
273 .cra_u = { .digest = { 282 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
274 .dia_digestsize = SHA384_DIGEST_SIZE, 283 .cra_blocksize = SHA384_BLOCK_SIZE,
275 .dia_init = sha384_init, 284 .cra_module = THIS_MODULE,
276 .dia_update = sha512_update, 285 }
277 .dia_final = sha384_final }
278 }
279}; 286};
280 287
281static int __init sha512_generic_mod_init(void) 288static int __init sha512_generic_mod_init(void)
282{ 289{
283 int ret = 0; 290 int ret = 0;
284 291
285 if ((ret = crypto_register_alg(&sha384)) < 0) 292 if ((ret = crypto_register_shash(&sha384)) < 0)
286 goto out; 293 goto out;
287 if ((ret = crypto_register_alg(&sha512)) < 0) 294 if ((ret = crypto_register_shash(&sha512)) < 0)
288 crypto_unregister_alg(&sha384); 295 crypto_unregister_shash(&sha384);
289out: 296out:
290 return ret; 297 return ret;
291} 298}
292 299
293static void __exit sha512_generic_mod_fini(void) 300static void __exit sha512_generic_mod_fini(void)
294{ 301{
295 crypto_unregister_alg(&sha384); 302 crypto_unregister_shash(&sha384);
296 crypto_unregister_alg(&sha512); 303 crypto_unregister_shash(&sha512);
297} 304}
298 305
299module_init(sha512_generic_mod_init); 306module_init(sha512_generic_mod_init);
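[Editorial note, not part of the patch set: the change above replaces an 80-entry scratch array in every sha512 context with a single buffer per CPU. A hedged sketch of that general pattern follows; the variable and function names are made up, only the get_cpu_var()/put_cpu_var() pairing and the scrub before release matter.]

#include <linux/percpu.h>
#include <linux/string.h>
#include <linux/types.h>

/* One scratch buffer per CPU instead of one per transform context. */
static DEFINE_PER_CPU(u64[80], example_scratch);

static void example_transform(u64 *state, const u8 *input)
{
        u64 *W = get_cpu_var(example_scratch);  /* disables preemption */

        /* ... expand `input` into W[0..79] and fold it into `state`,
         * as sha512_transform() does above ... */

        /* Scrub and release: the buffer is shared with whatever runs
         * on this CPU next. */
        memset(W, 0, sizeof(__get_cpu_var(example_scratch)));
        put_cpu_var(example_scratch);           /* re-enables preemption */
}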
diff --git a/crypto/shash.c b/crypto/shash.c
new file mode 100644
index 000000000000..c9df367332ff
--- /dev/null
+++ b/crypto/shash.c
@@ -0,0 +1,508 @@
1/*
2 * Synchronous Cryptographic Hash operations.
3 *
4 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms of the GNU General Public License as published by the Free
8 * Software Foundation; either version 2 of the License, or (at your option)
9 * any later version.
10 *
11 */
12
13#include <crypto/scatterwalk.h>
14#include <crypto/internal/hash.h>
15#include <linux/err.h>
16#include <linux/kernel.h>
17#include <linux/module.h>
18#include <linux/slab.h>
19#include <linux/seq_file.h>
20
21static const struct crypto_type crypto_shash_type;
22
23static inline struct crypto_shash *__crypto_shash_cast(struct crypto_tfm *tfm)
24{
25 return container_of(tfm, struct crypto_shash, base);
26}
27
28#include "internal.h"
29
30static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
31 unsigned int keylen)
32{
33 struct shash_alg *shash = crypto_shash_alg(tfm);
34 unsigned long alignmask = crypto_shash_alignmask(tfm);
35 unsigned long absize;
36 u8 *buffer, *alignbuffer;
37 int err;
38
39 absize = keylen + (alignmask & ~(CRYPTO_MINALIGN - 1));
40 buffer = kmalloc(absize, GFP_KERNEL);
41 if (!buffer)
42 return -ENOMEM;
43
44 alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
45 memcpy(alignbuffer, key, keylen);
46 err = shash->setkey(tfm, alignbuffer, keylen);
47 memset(alignbuffer, 0, keylen);
48 kfree(buffer);
49 return err;
50}
51
52int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
53 unsigned int keylen)
54{
55 struct shash_alg *shash = crypto_shash_alg(tfm);
56 unsigned long alignmask = crypto_shash_alignmask(tfm);
57
58 if (!shash->setkey)
59 return -ENOSYS;
60
61 if ((unsigned long)key & alignmask)
62 return shash_setkey_unaligned(tfm, key, keylen);
63
64 return shash->setkey(tfm, key, keylen);
65}
66EXPORT_SYMBOL_GPL(crypto_shash_setkey);
67
68static inline unsigned int shash_align_buffer_size(unsigned len,
69 unsigned long mask)
70{
71 return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
72}
73
74static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
75 unsigned int len)
76{
77 struct crypto_shash *tfm = desc->tfm;
78 struct shash_alg *shash = crypto_shash_alg(tfm);
79 unsigned long alignmask = crypto_shash_alignmask(tfm);
80 unsigned int unaligned_len = alignmask + 1 -
81 ((unsigned long)data & alignmask);
82 u8 buf[shash_align_buffer_size(unaligned_len, alignmask)]
83 __attribute__ ((aligned));
84
85 memcpy(buf, data, unaligned_len);
86
87 return shash->update(desc, buf, unaligned_len) ?:
88 shash->update(desc, data + unaligned_len, len - unaligned_len);
89}
90
91int crypto_shash_update(struct shash_desc *desc, const u8 *data,
92 unsigned int len)
93{
94 struct crypto_shash *tfm = desc->tfm;
95 struct shash_alg *shash = crypto_shash_alg(tfm);
96 unsigned long alignmask = crypto_shash_alignmask(tfm);
97
98 if ((unsigned long)data & alignmask)
99 return shash_update_unaligned(desc, data, len);
100
101 return shash->update(desc, data, len);
102}
103EXPORT_SYMBOL_GPL(crypto_shash_update);
104
105static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
106{
107 struct crypto_shash *tfm = desc->tfm;
108 unsigned long alignmask = crypto_shash_alignmask(tfm);
109 struct shash_alg *shash = crypto_shash_alg(tfm);
110 unsigned int ds = crypto_shash_digestsize(tfm);
111 u8 buf[shash_align_buffer_size(ds, alignmask)]
112 __attribute__ ((aligned));
113 int err;
114
115 err = shash->final(desc, buf);
116 memcpy(out, buf, ds);
117 return err;
118}
119
120int crypto_shash_final(struct shash_desc *desc, u8 *out)
121{
122 struct crypto_shash *tfm = desc->tfm;
123 struct shash_alg *shash = crypto_shash_alg(tfm);
124 unsigned long alignmask = crypto_shash_alignmask(tfm);
125
126 if ((unsigned long)out & alignmask)
127 return shash_final_unaligned(desc, out);
128
129 return shash->final(desc, out);
130}
131EXPORT_SYMBOL_GPL(crypto_shash_final);
132
133static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
134 unsigned int len, u8 *out)
135{
136 return crypto_shash_update(desc, data, len) ?:
137 crypto_shash_final(desc, out);
138}
139
140int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
141 unsigned int len, u8 *out)
142{
143 struct crypto_shash *tfm = desc->tfm;
144 struct shash_alg *shash = crypto_shash_alg(tfm);
145 unsigned long alignmask = crypto_shash_alignmask(tfm);
146
147 if (((unsigned long)data | (unsigned long)out) & alignmask ||
148 !shash->finup)
149 return shash_finup_unaligned(desc, data, len, out);
150
151 return shash->finup(desc, data, len, out);
152}
153EXPORT_SYMBOL_GPL(crypto_shash_finup);
154
155static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
156 unsigned int len, u8 *out)
157{
158 return crypto_shash_init(desc) ?:
159 crypto_shash_update(desc, data, len) ?:
160 crypto_shash_final(desc, out);
161}
162
163int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
164 unsigned int len, u8 *out)
165{
166 struct crypto_shash *tfm = desc->tfm;
167 struct shash_alg *shash = crypto_shash_alg(tfm);
168 unsigned long alignmask = crypto_shash_alignmask(tfm);
169
170 if (((unsigned long)data | (unsigned long)out) & alignmask ||
171 !shash->digest)
172 return shash_digest_unaligned(desc, data, len, out);
173
174 return shash->digest(desc, data, len, out);
175}
176EXPORT_SYMBOL_GPL(crypto_shash_digest);
177
178int crypto_shash_import(struct shash_desc *desc, const u8 *in)
179{
180 struct crypto_shash *tfm = desc->tfm;
181 struct shash_alg *alg = crypto_shash_alg(tfm);
182
183 memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(tfm));
184
185 if (alg->reinit)
186 alg->reinit(desc);
187
188 return 0;
189}
190EXPORT_SYMBOL_GPL(crypto_shash_import);
191
192static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
193 unsigned int keylen)
194{
195 struct crypto_shash **ctx = crypto_ahash_ctx(tfm);
196
197 return crypto_shash_setkey(*ctx, key, keylen);
198}
199
200static int shash_async_init(struct ahash_request *req)
201{
202 struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
203 struct shash_desc *desc = ahash_request_ctx(req);
204
205 desc->tfm = *ctx;
206 desc->flags = req->base.flags;
207
208 return crypto_shash_init(desc);
209}
210
211static int shash_async_update(struct ahash_request *req)
212{
213 struct shash_desc *desc = ahash_request_ctx(req);
214 struct crypto_hash_walk walk;
215 int nbytes;
216
217 for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
218 nbytes = crypto_hash_walk_done(&walk, nbytes))
219 nbytes = crypto_shash_update(desc, walk.data, nbytes);
220
221 return nbytes;
222}
223
224static int shash_async_final(struct ahash_request *req)
225{
226 return crypto_shash_final(ahash_request_ctx(req), req->result);
227}
228
229static int shash_async_digest(struct ahash_request *req)
230{
231 struct scatterlist *sg = req->src;
232 unsigned int offset = sg->offset;
233 unsigned int nbytes = req->nbytes;
234 int err;
235
236 if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
237 struct crypto_shash **ctx =
238 crypto_ahash_ctx(crypto_ahash_reqtfm(req));
239 struct shash_desc *desc = ahash_request_ctx(req);
240 void *data;
241
242 desc->tfm = *ctx;
243 desc->flags = req->base.flags;
244
245 data = crypto_kmap(sg_page(sg), 0);
246 err = crypto_shash_digest(desc, data + offset, nbytes,
247 req->result);
248 crypto_kunmap(data, 0);
249 crypto_yield(desc->flags);
250 goto out;
251 }
252
253 err = shash_async_init(req);
254 if (err)
255 goto out;
256
257 err = shash_async_update(req);
258 if (err)
259 goto out;
260
261 err = shash_async_final(req);
262
263out:
264 return err;
265}
266
267static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
268{
269 struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
270
271 crypto_free_shash(*ctx);
272}
273
274static int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
275{
276 struct crypto_alg *calg = tfm->__crt_alg;
277 struct shash_alg *alg = __crypto_shash_alg(calg);
278 struct ahash_tfm *crt = &tfm->crt_ahash;
279 struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
280 struct crypto_shash *shash;
281
282 if (!crypto_mod_get(calg))
283 return -EAGAIN;
284
285 shash = __crypto_shash_cast(crypto_create_tfm(
286 calg, &crypto_shash_type));
287 if (IS_ERR(shash)) {
288 crypto_mod_put(calg);
289 return PTR_ERR(shash);
290 }
291
292 *ctx = shash;
293 tfm->exit = crypto_exit_shash_ops_async;
294
295 crt->init = shash_async_init;
296 crt->update = shash_async_update;
297 crt->final = shash_async_final;
298 crt->digest = shash_async_digest;
299 crt->setkey = shash_async_setkey;
300
301 crt->digestsize = alg->digestsize;
302 crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);
303
304 return 0;
305}
306
307static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
308 unsigned int keylen)
309{
310 struct shash_desc *desc = crypto_hash_ctx(tfm);
311
312 return crypto_shash_setkey(desc->tfm, key, keylen);
313}
314
315static int shash_compat_init(struct hash_desc *hdesc)
316{
317 struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
318
319 desc->flags = hdesc->flags;
320
321 return crypto_shash_init(desc);
322}
323
324static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
325 unsigned int len)
326{
327 struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
328 struct crypto_hash_walk walk;
329 int nbytes;
330
331 for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
332 nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
333 nbytes = crypto_shash_update(desc, walk.data, nbytes);
334
335 return nbytes;
336}
337
338static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
339{
340 return crypto_shash_final(crypto_hash_ctx(hdesc->tfm), out);
341}
342
343static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
344 unsigned int nbytes, u8 *out)
345{
346 unsigned int offset = sg->offset;
347 int err;
348
349 if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
350 struct shash_desc *desc = crypto_hash_ctx(hdesc->tfm);
351 void *data;
352
353 desc->flags = hdesc->flags;
354
355 data = crypto_kmap(sg_page(sg), 0);
356 err = crypto_shash_digest(desc, data + offset, nbytes, out);
357 crypto_kunmap(data, 0);
358 crypto_yield(desc->flags);
359 goto out;
360 }
361
362 err = shash_compat_init(hdesc);
363 if (err)
364 goto out;
365
366 err = shash_compat_update(hdesc, sg, nbytes);
367 if (err)
368 goto out;
369
370 err = shash_compat_final(hdesc, out);
371
372out:
373 return err;
374}
375
376static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
377{
378 struct shash_desc *desc= crypto_tfm_ctx(tfm);
379
380 crypto_free_shash(desc->tfm);
381}
382
383static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
384{
385 struct hash_tfm *crt = &tfm->crt_hash;
386 struct crypto_alg *calg = tfm->__crt_alg;
387 struct shash_alg *alg = __crypto_shash_alg(calg);
388 struct shash_desc *desc = crypto_tfm_ctx(tfm);
389 struct crypto_shash *shash;
390
391 shash = __crypto_shash_cast(crypto_create_tfm(
392 calg, &crypto_shash_type));
393 if (IS_ERR(shash))
394 return PTR_ERR(shash);
395
396 desc->tfm = shash;
397 tfm->exit = crypto_exit_shash_ops_compat;
398
399 crt->init = shash_compat_init;
400 crt->update = shash_compat_update;
401 crt->final = shash_compat_final;
402 crt->digest = shash_compat_digest;
403 crt->setkey = shash_compat_setkey;
404
405 crt->digestsize = alg->digestsize;
406
407 return 0;
408}
409
410static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
411{
412 switch (mask & CRYPTO_ALG_TYPE_MASK) {
413 case CRYPTO_ALG_TYPE_HASH_MASK:
414 return crypto_init_shash_ops_compat(tfm);
415 case CRYPTO_ALG_TYPE_AHASH_MASK:
416 return crypto_init_shash_ops_async(tfm);
417 }
418
419 return -EINVAL;
420}
421
422static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
423 u32 mask)
424{
425 struct shash_alg *salg = __crypto_shash_alg(alg);
426
427 switch (mask & CRYPTO_ALG_TYPE_MASK) {
428 case CRYPTO_ALG_TYPE_HASH_MASK:
429 return sizeof(struct shash_desc) + salg->descsize;
430 case CRYPTO_ALG_TYPE_AHASH_MASK:
431 return sizeof(struct crypto_shash *);
432 }
433
434 return 0;
435}
436
437static int crypto_shash_init_tfm(struct crypto_tfm *tfm,
438 const struct crypto_type *frontend)
439{
440 if (frontend->type != CRYPTO_ALG_TYPE_SHASH)
441 return -EINVAL;
442 return 0;
443}
444
445static unsigned int crypto_shash_extsize(struct crypto_alg *alg,
446 const struct crypto_type *frontend)
447{
448 return alg->cra_ctxsize;
449}
450
451static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
452 __attribute__ ((unused));
453static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
454{
455 struct shash_alg *salg = __crypto_shash_alg(alg);
456
457 seq_printf(m, "type : shash\n");
458 seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
459 seq_printf(m, "digestsize : %u\n", salg->digestsize);
460 seq_printf(m, "descsize : %u\n", salg->descsize);
461}
462
463static const struct crypto_type crypto_shash_type = {
464 .ctxsize = crypto_shash_ctxsize,
465 .extsize = crypto_shash_extsize,
466 .init = crypto_init_shash_ops,
467 .init_tfm = crypto_shash_init_tfm,
468#ifdef CONFIG_PROC_FS
469 .show = crypto_shash_show,
470#endif
471 .maskclear = ~CRYPTO_ALG_TYPE_MASK,
472 .maskset = CRYPTO_ALG_TYPE_MASK,
473 .type = CRYPTO_ALG_TYPE_SHASH,
474 .tfmsize = offsetof(struct crypto_shash, base),
475};
476
477struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
478 u32 mask)
479{
480 return __crypto_shash_cast(
481 crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask));
482}
483EXPORT_SYMBOL_GPL(crypto_alloc_shash);
484
485int crypto_register_shash(struct shash_alg *alg)
486{
487 struct crypto_alg *base = &alg->base;
488
489 if (alg->digestsize > PAGE_SIZE / 8 ||
490 alg->descsize > PAGE_SIZE / 8)
491 return -EINVAL;
492
493 base->cra_type = &crypto_shash_type;
494 base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
495 base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;
496
497 return crypto_register_alg(base);
498}
499EXPORT_SYMBOL_GPL(crypto_register_shash);
500
501int crypto_unregister_shash(struct shash_alg *alg)
502{
503 return crypto_unregister_alg(&alg->base);
504}
505EXPORT_SYMBOL_GPL(crypto_unregister_shash);
506
507MODULE_LICENSE("GPL");
508MODULE_DESCRIPTION("Synchronous cryptographic hash type");
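[Editorial note, not part of the patch set: a short usage sketch for the entry points exported above. The caller allocates the descriptor itself (descsize bytes of state after the header), and crypto_shash_finup() transparently falls back to update() + final() when an algorithm does not implement finup. The helper name is illustrative.]

#include <crypto/hash.h>

/* Hash `buf` in two chunks with an already-allocated shash tfm. */
static int shash_chunked_example(struct crypto_shash *tfm,
                                 const u8 *buf, unsigned int len, u8 *out)
{
        struct {
                struct shash_desc shash;
                char ctx[crypto_shash_descsize(tfm)];
        } sdesc;
        unsigned int half = len / 2;
        int err;

        sdesc.shash.tfm = tfm;
        sdesc.shash.flags = 0;

        err = crypto_shash_init(&sdesc.shash);
        if (err)
                return err;

        err = crypto_shash_update(&sdesc.shash, buf, half);
        if (err)
                return err;

        return crypto_shash_finup(&sdesc.shash, buf + half, len - half, out);
}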
diff --git a/crypto/testmgr.c b/crypto/testmgr.c
index b828c6cf1b1d..a75f11ffb957 100644
--- a/crypto/testmgr.c
+++ b/crypto/testmgr.c
@@ -843,6 +843,14 @@ static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
843 goto out; 843 goto out;
844 } 844 }
845 845
846 if (dlen != ctemplate[i].outlen) {
847 printk(KERN_ERR "alg: comp: Compression test %d "
848 "failed for %s: output len = %d\n", i + 1, algo,
849 dlen);
850 ret = -EINVAL;
851 goto out;
852 }
853
846 if (memcmp(result, ctemplate[i].output, dlen)) { 854 if (memcmp(result, ctemplate[i].output, dlen)) {
847 printk(KERN_ERR "alg: comp: Compression test %d " 855 printk(KERN_ERR "alg: comp: Compression test %d "
848 "failed for %s\n", i + 1, algo); 856 "failed for %s\n", i + 1, algo);
@@ -853,7 +861,7 @@ static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
853 } 861 }
854 862
855 for (i = 0; i < dtcount; i++) { 863 for (i = 0; i < dtcount; i++) {
856 int ilen, ret, dlen = COMP_BUF_SIZE; 864 int ilen, dlen = COMP_BUF_SIZE;
857 865
858 memset(result, 0, sizeof (result)); 866 memset(result, 0, sizeof (result));
859 867
@@ -867,6 +875,14 @@ static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
867 goto out; 875 goto out;
868 } 876 }
869 877
878 if (dlen != dtemplate[i].outlen) {
879 printk(KERN_ERR "alg: comp: Decompression test %d "
880 "failed for %s: output len = %d\n", i + 1, algo,
881 dlen);
882 ret = -EINVAL;
883 goto out;
884 }
885
870 if (memcmp(result, dtemplate[i].output, dlen)) { 886 if (memcmp(result, dtemplate[i].output, dlen)) {
871 printk(KERN_ERR "alg: comp: Decompression test %d " 887 printk(KERN_ERR "alg: comp: Decompression test %d "
872 "failed for %s\n", i + 1, algo); 888 "failed for %s\n", i + 1, algo);
@@ -1010,6 +1026,55 @@ static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1010 return err; 1026 return err;
1011} 1027}
1012 1028
1029static int alg_test_crc32c(const struct alg_test_desc *desc,
1030 const char *driver, u32 type, u32 mask)
1031{
1032 struct crypto_shash *tfm;
1033 u32 val;
1034 int err;
1035
1036 err = alg_test_hash(desc, driver, type, mask);
1037 if (err)
1038 goto out;
1039
1040 tfm = crypto_alloc_shash(driver, type, mask);
1041 if (IS_ERR(tfm)) {
1042 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1043 "%ld\n", driver, PTR_ERR(tfm));
1044 err = PTR_ERR(tfm);
1045 goto out;
1046 }
1047
1048 do {
1049 struct {
1050 struct shash_desc shash;
1051 char ctx[crypto_shash_descsize(tfm)];
1052 } sdesc;
1053
1054 sdesc.shash.tfm = tfm;
1055 sdesc.shash.flags = 0;
1056
1057 *(u32 *)sdesc.ctx = le32_to_cpu(420553207);
1058 err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
1059 if (err) {
1060 printk(KERN_ERR "alg: crc32c: Operation failed for "
1061 "%s: %d\n", driver, err);
1062 break;
1063 }
1064
1065 if (val != ~420553207) {
1066 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1067 "%d\n", driver, val);
1068 err = -EINVAL;
1069 }
1070 } while (0);
1071
1072 crypto_free_shash(tfm);
1073
1074out:
1075 return err;
1076}
1077
1013/* Please keep this list sorted by algorithm name. */ 1078/* Please keep this list sorted by algorithm name. */
1014static const struct alg_test_desc alg_test_descs[] = { 1079static const struct alg_test_desc alg_test_descs[] = {
1015 { 1080 {
@@ -1134,7 +1199,7 @@ static const struct alg_test_desc alg_test_descs[] = {
1134 } 1199 }
1135 }, { 1200 }, {
1136 .alg = "crc32c", 1201 .alg = "crc32c",
1137 .test = alg_test_hash, 1202 .test = alg_test_crc32c,
1138 .suite = { 1203 .suite = {
1139 .hash = { 1204 .hash = {
1140 .vecs = crc32c_tv_template, 1205 .vecs = crc32c_tv_template,
@@ -1801,6 +1866,7 @@ static int alg_find_test(const char *alg)
1801int alg_test(const char *driver, const char *alg, u32 type, u32 mask) 1866int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
1802{ 1867{
1803 int i; 1868 int i;
1869 int rc;
1804 1870
1805 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) { 1871 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
1806 char nalg[CRYPTO_MAX_ALG_NAME]; 1872 char nalg[CRYPTO_MAX_ALG_NAME];
@@ -1820,8 +1886,12 @@ int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
1820 if (i < 0) 1886 if (i < 0)
1821 goto notest; 1887 goto notest;
1822 1888
1823 return alg_test_descs[i].test(alg_test_descs + i, driver, 1889 rc = alg_test_descs[i].test(alg_test_descs + i, driver,
1824 type, mask); 1890 type, mask);
1891 if (fips_enabled && rc)
1892 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
1893
1894 return rc;
1825 1895
1826notest: 1896notest:
1827 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver); 1897 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
diff --git a/crypto/testmgr.h b/crypto/testmgr.h
index dee94d9ecfba..132953e144d3 100644
--- a/crypto/testmgr.h
+++ b/crypto/testmgr.h
@@ -8349,7 +8349,7 @@ struct comp_testvec {
8349 8349
8350/* 8350/*
8351 * Deflate test vectors (null-terminated strings). 8351 * Deflate test vectors (null-terminated strings).
8352 * Params: winbits=11, Z_DEFAULT_COMPRESSION, MAX_MEM_LEVEL. 8352 * Params: winbits=-11, Z_DEFAULT_COMPRESSION, MAX_MEM_LEVEL.
8353 */ 8353 */
8354#define DEFLATE_COMP_TEST_VECTORS 2 8354#define DEFLATE_COMP_TEST_VECTORS 2
8355#define DEFLATE_DECOMP_TEST_VECTORS 2 8355#define DEFLATE_DECOMP_TEST_VECTORS 2
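[Editorial note, not part of the patch set: the corrected comment matters because the sign of winbits changes the stream format. A negative windowBits value asks zlib for a raw deflate stream with no zlib header or Adler-32 trailer, which is the format the kernel deflate algorithm produces. A hedged sketch of such an initialization with the kernel zlib API; the helper name is illustrative and the caller is assumed to have set stream->workspace.]

#include <linux/zlib.h>

/* Raw deflate, as the winbits=-11 comment above describes. */
static int raw_deflate_init_example(struct z_stream_s *stream)
{
        /* stream->workspace must already point at a buffer of
         * zlib_deflate_workspacesize() bytes. */
        return zlib_deflateInit2(stream, Z_DEFAULT_COMPRESSION, Z_DEFLATED,
                                 -11, MAX_MEM_LEVEL, Z_DEFAULT_STRATEGY);
}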
diff --git a/crypto/tgr192.c b/crypto/tgr192.c
index a92414f24beb..cbca4f208c9f 100644
--- a/crypto/tgr192.c
+++ b/crypto/tgr192.c
@@ -21,11 +21,11 @@
21 * (at your option) any later version. 21 * (at your option) any later version.
22 * 22 *
23 */ 23 */
24#include <crypto/internal/hash.h>
24#include <linux/init.h> 25#include <linux/init.h>
25#include <linux/module.h> 26#include <linux/module.h>
26#include <linux/mm.h> 27#include <linux/mm.h>
27#include <asm/byteorder.h> 28#include <asm/byteorder.h>
28#include <linux/crypto.h>
29#include <linux/types.h> 29#include <linux/types.h>
30 30
31#define TGR192_DIGEST_SIZE 24 31#define TGR192_DIGEST_SIZE 24
@@ -495,24 +495,26 @@ static void tgr192_transform(struct tgr192_ctx *tctx, const u8 * data)
495 tctx->c = c; 495 tctx->c = c;
496} 496}
497 497
498static void tgr192_init(struct crypto_tfm *tfm) 498static int tgr192_init(struct shash_desc *desc)
499{ 499{
500 struct tgr192_ctx *tctx = crypto_tfm_ctx(tfm); 500 struct tgr192_ctx *tctx = shash_desc_ctx(desc);
501 501
502 tctx->a = 0x0123456789abcdefULL; 502 tctx->a = 0x0123456789abcdefULL;
503 tctx->b = 0xfedcba9876543210ULL; 503 tctx->b = 0xfedcba9876543210ULL;
504 tctx->c = 0xf096a5b4c3b2e187ULL; 504 tctx->c = 0xf096a5b4c3b2e187ULL;
505 tctx->nblocks = 0; 505 tctx->nblocks = 0;
506 tctx->count = 0; 506 tctx->count = 0;
507
508 return 0;
507} 509}
508 510
509 511
510/* Update the message digest with the contents 512/* Update the message digest with the contents
511 * of INBUF with length INLEN. */ 513 * of INBUF with length INLEN. */
512static void tgr192_update(struct crypto_tfm *tfm, const u8 *inbuf, 514static int tgr192_update(struct shash_desc *desc, const u8 *inbuf,
513 unsigned int len) 515 unsigned int len)
514{ 516{
515 struct tgr192_ctx *tctx = crypto_tfm_ctx(tfm); 517 struct tgr192_ctx *tctx = shash_desc_ctx(desc);
516 518
517 if (tctx->count == 64) { /* flush the buffer */ 519 if (tctx->count == 64) { /* flush the buffer */
518 tgr192_transform(tctx, tctx->hash); 520 tgr192_transform(tctx, tctx->hash);
@@ -520,15 +522,15 @@ static void tgr192_update(struct crypto_tfm *tfm, const u8 *inbuf,
520 tctx->nblocks++; 522 tctx->nblocks++;
521 } 523 }
522 if (!inbuf) { 524 if (!inbuf) {
523 return; 525 return 0;
524 } 526 }
525 if (tctx->count) { 527 if (tctx->count) {
526 for (; len && tctx->count < 64; len--) { 528 for (; len && tctx->count < 64; len--) {
527 tctx->hash[tctx->count++] = *inbuf++; 529 tctx->hash[tctx->count++] = *inbuf++;
528 } 530 }
529 tgr192_update(tfm, NULL, 0); 531 tgr192_update(desc, NULL, 0);
530 if (!len) { 532 if (!len) {
531 return; 533 return 0;
532 } 534 }
533 535
534 } 536 }
@@ -543,20 +545,22 @@ static void tgr192_update(struct crypto_tfm *tfm, const u8 *inbuf,
543 for (; len && tctx->count < 64; len--) { 545 for (; len && tctx->count < 64; len--) {
544 tctx->hash[tctx->count++] = *inbuf++; 546 tctx->hash[tctx->count++] = *inbuf++;
545 } 547 }
548
549 return 0;
546} 550}
547 551
548 552
549 553
550/* The routine terminates the computation */ 554/* The routine terminates the computation */
551static void tgr192_final(struct crypto_tfm *tfm, u8 * out) 555static int tgr192_final(struct shash_desc *desc, u8 * out)
552{ 556{
553 struct tgr192_ctx *tctx = crypto_tfm_ctx(tfm); 557 struct tgr192_ctx *tctx = shash_desc_ctx(desc);
554 __be64 *dst = (__be64 *)out; 558 __be64 *dst = (__be64 *)out;
555 __be64 *be64p; 559 __be64 *be64p;
556 __le32 *le32p; 560 __le32 *le32p;
557 u32 t, msb, lsb; 561 u32 t, msb, lsb;
558 562
559 tgr192_update(tfm, NULL, 0); /* flush */ ; 563 tgr192_update(desc, NULL, 0); /* flush */ ;
560 564
561 msb = 0; 565 msb = 0;
562 t = tctx->nblocks; 566 t = tctx->nblocks;
@@ -584,7 +588,7 @@ static void tgr192_final(struct crypto_tfm *tfm, u8 * out)
584 while (tctx->count < 64) { 588 while (tctx->count < 64) {
585 tctx->hash[tctx->count++] = 0; 589 tctx->hash[tctx->count++] = 0;
586 } 590 }
587 tgr192_update(tfm, NULL, 0); /* flush */ ; 591 tgr192_update(desc, NULL, 0); /* flush */ ;
588 memset(tctx->hash, 0, 56); /* fill next block with zeroes */ 592 memset(tctx->hash, 0, 56); /* fill next block with zeroes */
589 } 593 }
590 /* append the 64 bit count */ 594 /* append the 64 bit count */
@@ -598,91 +602,94 @@ static void tgr192_final(struct crypto_tfm *tfm, u8 * out)
598 dst[0] = be64p[0] = cpu_to_be64(tctx->a); 602 dst[0] = be64p[0] = cpu_to_be64(tctx->a);
599 dst[1] = be64p[1] = cpu_to_be64(tctx->b); 603 dst[1] = be64p[1] = cpu_to_be64(tctx->b);
600 dst[2] = be64p[2] = cpu_to_be64(tctx->c); 604 dst[2] = be64p[2] = cpu_to_be64(tctx->c);
605
606 return 0;
601} 607}
602 608
603static void tgr160_final(struct crypto_tfm *tfm, u8 * out) 609static int tgr160_final(struct shash_desc *desc, u8 * out)
604{ 610{
605 u8 D[64]; 611 u8 D[64];
606 612
607 tgr192_final(tfm, D); 613 tgr192_final(desc, D);
608 memcpy(out, D, TGR160_DIGEST_SIZE); 614 memcpy(out, D, TGR160_DIGEST_SIZE);
609 memset(D, 0, TGR192_DIGEST_SIZE); 615 memset(D, 0, TGR192_DIGEST_SIZE);
616
617 return 0;
610} 618}
611 619
612static void tgr128_final(struct crypto_tfm *tfm, u8 * out) 620static int tgr128_final(struct shash_desc *desc, u8 * out)
613{ 621{
614 u8 D[64]; 622 u8 D[64];
615 623
616 tgr192_final(tfm, D); 624 tgr192_final(desc, D);
617 memcpy(out, D, TGR128_DIGEST_SIZE); 625 memcpy(out, D, TGR128_DIGEST_SIZE);
618 memset(D, 0, TGR192_DIGEST_SIZE); 626 memset(D, 0, TGR192_DIGEST_SIZE);
627
628 return 0;
619} 629}
620 630
621static struct crypto_alg tgr192 = { 631static struct shash_alg tgr192 = {
622 .cra_name = "tgr192", 632 .digestsize = TGR192_DIGEST_SIZE,
623 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 633 .init = tgr192_init,
624 .cra_blocksize = TGR192_BLOCK_SIZE, 634 .update = tgr192_update,
625 .cra_ctxsize = sizeof(struct tgr192_ctx), 635 .final = tgr192_final,
626 .cra_module = THIS_MODULE, 636 .descsize = sizeof(struct tgr192_ctx),
627 .cra_alignmask = 7, 637 .base = {
628 .cra_list = LIST_HEAD_INIT(tgr192.cra_list), 638 .cra_name = "tgr192",
629 .cra_u = {.digest = { 639 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
630 .dia_digestsize = TGR192_DIGEST_SIZE, 640 .cra_blocksize = TGR192_BLOCK_SIZE,
631 .dia_init = tgr192_init, 641 .cra_module = THIS_MODULE,
632 .dia_update = tgr192_update, 642 }
633 .dia_final = tgr192_final}}
634}; 643};
635 644
636static struct crypto_alg tgr160 = { 645static struct shash_alg tgr160 = {
637 .cra_name = "tgr160", 646 .digestsize = TGR160_DIGEST_SIZE,
638 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 647 .init = tgr192_init,
639 .cra_blocksize = TGR192_BLOCK_SIZE, 648 .update = tgr192_update,
640 .cra_ctxsize = sizeof(struct tgr192_ctx), 649 .final = tgr160_final,
641 .cra_module = THIS_MODULE, 650 .descsize = sizeof(struct tgr192_ctx),
642 .cra_alignmask = 7, 651 .base = {
643 .cra_list = LIST_HEAD_INIT(tgr160.cra_list), 652 .cra_name = "tgr160",
644 .cra_u = {.digest = { 653 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
645 .dia_digestsize = TGR160_DIGEST_SIZE, 654 .cra_blocksize = TGR192_BLOCK_SIZE,
646 .dia_init = tgr192_init, 655 .cra_module = THIS_MODULE,
647 .dia_update = tgr192_update, 656 }
648 .dia_final = tgr160_final}}
649}; 657};
650 658
651static struct crypto_alg tgr128 = { 659static struct shash_alg tgr128 = {
652 .cra_name = "tgr128", 660 .digestsize = TGR128_DIGEST_SIZE,
653 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 661 .init = tgr192_init,
654 .cra_blocksize = TGR192_BLOCK_SIZE, 662 .update = tgr192_update,
655 .cra_ctxsize = sizeof(struct tgr192_ctx), 663 .final = tgr128_final,
656 .cra_module = THIS_MODULE, 664 .descsize = sizeof(struct tgr192_ctx),
657 .cra_alignmask = 7, 665 .base = {
658 .cra_list = LIST_HEAD_INIT(tgr128.cra_list), 666 .cra_name = "tgr128",
659 .cra_u = {.digest = { 667 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
660 .dia_digestsize = TGR128_DIGEST_SIZE, 668 .cra_blocksize = TGR192_BLOCK_SIZE,
661 .dia_init = tgr192_init, 669 .cra_module = THIS_MODULE,
662 .dia_update = tgr192_update, 670 }
663 .dia_final = tgr128_final}}
664}; 671};
665 672
666static int __init tgr192_mod_init(void) 673static int __init tgr192_mod_init(void)
667{ 674{
668 int ret = 0; 675 int ret = 0;
669 676
670 ret = crypto_register_alg(&tgr192); 677 ret = crypto_register_shash(&tgr192);
671 678
672 if (ret < 0) { 679 if (ret < 0) {
673 goto out; 680 goto out;
674 } 681 }
675 682
676 ret = crypto_register_alg(&tgr160); 683 ret = crypto_register_shash(&tgr160);
677 if (ret < 0) { 684 if (ret < 0) {
678 crypto_unregister_alg(&tgr192); 685 crypto_unregister_shash(&tgr192);
679 goto out; 686 goto out;
680 } 687 }
681 688
682 ret = crypto_register_alg(&tgr128); 689 ret = crypto_register_shash(&tgr128);
683 if (ret < 0) { 690 if (ret < 0) {
684 crypto_unregister_alg(&tgr192); 691 crypto_unregister_shash(&tgr192);
685 crypto_unregister_alg(&tgr160); 692 crypto_unregister_shash(&tgr160);
686 } 693 }
687 out: 694 out:
688 return ret; 695 return ret;
@@ -690,9 +697,9 @@ static int __init tgr192_mod_init(void)
690 697
691static void __exit tgr192_mod_fini(void) 698static void __exit tgr192_mod_fini(void)
692{ 699{
693 crypto_unregister_alg(&tgr192); 700 crypto_unregister_shash(&tgr192);
694 crypto_unregister_alg(&tgr160); 701 crypto_unregister_shash(&tgr160);
695 crypto_unregister_alg(&tgr128); 702 crypto_unregister_shash(&tgr128);
696} 703}
697 704
698MODULE_ALIAS("tgr160"); 705MODULE_ALIAS("tgr160");
diff --git a/crypto/wp512.c b/crypto/wp512.c
index bff28560d66d..723427273687 100644
--- a/crypto/wp512.c
+++ b/crypto/wp512.c
@@ -19,11 +19,11 @@
19 * (at your option) any later version. 19 * (at your option) any later version.
20 * 20 *
21 */ 21 */
22#include <crypto/internal/hash.h>
22#include <linux/init.h> 23#include <linux/init.h>
23#include <linux/module.h> 24#include <linux/module.h>
24#include <linux/mm.h> 25#include <linux/mm.h>
25#include <asm/byteorder.h> 26#include <asm/byteorder.h>
26#include <linux/crypto.h>
27#include <linux/types.h> 27#include <linux/types.h>
28 28
29#define WP512_DIGEST_SIZE 64 29#define WP512_DIGEST_SIZE 64
@@ -980,8 +980,8 @@ static void wp512_process_buffer(struct wp512_ctx *wctx) {
980 980
981} 981}
982 982
983static void wp512_init(struct crypto_tfm *tfm) { 983static int wp512_init(struct shash_desc *desc) {
984 struct wp512_ctx *wctx = crypto_tfm_ctx(tfm); 984 struct wp512_ctx *wctx = shash_desc_ctx(desc);
985 int i; 985 int i;
986 986
987 memset(wctx->bitLength, 0, 32); 987 memset(wctx->bitLength, 0, 32);
@@ -990,12 +990,14 @@ static void wp512_init(struct crypto_tfm *tfm) {
990 for (i = 0; i < 8; i++) { 990 for (i = 0; i < 8; i++) {
991 wctx->hash[i] = 0L; 991 wctx->hash[i] = 0L;
992 } 992 }
993
994 return 0;
993} 995}
994 996
995static void wp512_update(struct crypto_tfm *tfm, const u8 *source, 997static int wp512_update(struct shash_desc *desc, const u8 *source,
996 unsigned int len) 998 unsigned int len)
997{ 999{
998 struct wp512_ctx *wctx = crypto_tfm_ctx(tfm); 1000 struct wp512_ctx *wctx = shash_desc_ctx(desc);
999 int sourcePos = 0; 1001 int sourcePos = 0;
1000 unsigned int bits_len = len * 8; // convert to number of bits 1002 unsigned int bits_len = len * 8; // convert to number of bits
1001 int sourceGap = (8 - ((int)bits_len & 7)) & 7; 1003 int sourceGap = (8 - ((int)bits_len & 7)) & 7;
@@ -1051,11 +1053,12 @@ static void wp512_update(struct crypto_tfm *tfm, const u8 *source,
1051 wctx->bufferBits = bufferBits; 1053 wctx->bufferBits = bufferBits;
1052 wctx->bufferPos = bufferPos; 1054 wctx->bufferPos = bufferPos;
1053 1055
1056 return 0;
1054} 1057}
1055 1058
1056static void wp512_final(struct crypto_tfm *tfm, u8 *out) 1059static int wp512_final(struct shash_desc *desc, u8 *out)
1057{ 1060{
1058 struct wp512_ctx *wctx = crypto_tfm_ctx(tfm); 1061 struct wp512_ctx *wctx = shash_desc_ctx(desc);
1059 int i; 1062 int i;
1060 u8 *buffer = wctx->buffer; 1063 u8 *buffer = wctx->buffer;
1061 u8 *bitLength = wctx->bitLength; 1064 u8 *bitLength = wctx->bitLength;
@@ -1084,89 +1087,95 @@ static void wp512_final(struct crypto_tfm *tfm, u8 *out)
1084 digest[i] = cpu_to_be64(wctx->hash[i]); 1087 digest[i] = cpu_to_be64(wctx->hash[i]);
1085 wctx->bufferBits = bufferBits; 1088 wctx->bufferBits = bufferBits;
1086 wctx->bufferPos = bufferPos; 1089 wctx->bufferPos = bufferPos;
1090
1091 return 0;
1087} 1092}
1088 1093
1089static void wp384_final(struct crypto_tfm *tfm, u8 *out) 1094static int wp384_final(struct shash_desc *desc, u8 *out)
1090{ 1095{
1091 u8 D[64]; 1096 u8 D[64];
1092 1097
1093 wp512_final(tfm, D); 1098 wp512_final(desc, D);
1094 memcpy (out, D, WP384_DIGEST_SIZE); 1099 memcpy (out, D, WP384_DIGEST_SIZE);
1095 memset (D, 0, WP512_DIGEST_SIZE); 1100 memset (D, 0, WP512_DIGEST_SIZE);
1101
1102 return 0;
1096} 1103}
1097 1104
1098static void wp256_final(struct crypto_tfm *tfm, u8 *out) 1105static int wp256_final(struct shash_desc *desc, u8 *out)
1099{ 1106{
1100 u8 D[64]; 1107 u8 D[64];
1101 1108
1102 wp512_final(tfm, D); 1109 wp512_final(desc, D);
1103 memcpy (out, D, WP256_DIGEST_SIZE); 1110 memcpy (out, D, WP256_DIGEST_SIZE);
1104 memset (D, 0, WP512_DIGEST_SIZE); 1111 memset (D, 0, WP512_DIGEST_SIZE);
1112
1113 return 0;
1105} 1114}
1106 1115
1107static struct crypto_alg wp512 = { 1116static struct shash_alg wp512 = {
1108 .cra_name = "wp512", 1117 .digestsize = WP512_DIGEST_SIZE,
1109 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 1118 .init = wp512_init,
1110 .cra_blocksize = WP512_BLOCK_SIZE, 1119 .update = wp512_update,
1111 .cra_ctxsize = sizeof(struct wp512_ctx), 1120 .final = wp512_final,
1112 .cra_module = THIS_MODULE, 1121 .descsize = sizeof(struct wp512_ctx),
1113 .cra_list = LIST_HEAD_INIT(wp512.cra_list), 1122 .base = {
1114 .cra_u = { .digest = { 1123 .cra_name = "wp512",
1115 .dia_digestsize = WP512_DIGEST_SIZE, 1124 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
1116 .dia_init = wp512_init, 1125 .cra_blocksize = WP512_BLOCK_SIZE,
1117 .dia_update = wp512_update, 1126 .cra_module = THIS_MODULE,
1118 .dia_final = wp512_final } } 1127 }
1119}; 1128};
1120 1129
1121static struct crypto_alg wp384 = { 1130static struct shash_alg wp384 = {
1122 .cra_name = "wp384", 1131 .digestsize = WP384_DIGEST_SIZE,
1123 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 1132 .init = wp512_init,
1124 .cra_blocksize = WP512_BLOCK_SIZE, 1133 .update = wp512_update,
1125 .cra_ctxsize = sizeof(struct wp512_ctx), 1134 .final = wp384_final,
1126 .cra_module = THIS_MODULE, 1135 .descsize = sizeof(struct wp512_ctx),
1127 .cra_list = LIST_HEAD_INIT(wp384.cra_list), 1136 .base = {
1128 .cra_u = { .digest = { 1137 .cra_name = "wp384",
1129 .dia_digestsize = WP384_DIGEST_SIZE, 1138 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
1130 .dia_init = wp512_init, 1139 .cra_blocksize = WP512_BLOCK_SIZE,
1131 .dia_update = wp512_update, 1140 .cra_module = THIS_MODULE,
1132 .dia_final = wp384_final } } 1141 }
1133}; 1142};
1134 1143
1135static struct crypto_alg wp256 = { 1144static struct shash_alg wp256 = {
1136 .cra_name = "wp256", 1145 .digestsize = WP256_DIGEST_SIZE,
1137 .cra_flags = CRYPTO_ALG_TYPE_DIGEST, 1146 .init = wp512_init,
1138 .cra_blocksize = WP512_BLOCK_SIZE, 1147 .update = wp512_update,
1139 .cra_ctxsize = sizeof(struct wp512_ctx), 1148 .final = wp256_final,
1140 .cra_module = THIS_MODULE, 1149 .descsize = sizeof(struct wp512_ctx),
1141 .cra_list = LIST_HEAD_INIT(wp256.cra_list), 1150 .base = {
1142 .cra_u = { .digest = { 1151 .cra_name = "wp256",
1143 .dia_digestsize = WP256_DIGEST_SIZE, 1152 .cra_flags = CRYPTO_ALG_TYPE_SHASH,
1144 .dia_init = wp512_init, 1153 .cra_blocksize = WP512_BLOCK_SIZE,
1145 .dia_update = wp512_update, 1154 .cra_module = THIS_MODULE,
1146 .dia_final = wp256_final } } 1155 }
1147}; 1156};
1148 1157
1149static int __init wp512_mod_init(void) 1158static int __init wp512_mod_init(void)
1150{ 1159{
1151 int ret = 0; 1160 int ret = 0;
1152 1161
1153 ret = crypto_register_alg(&wp512); 1162 ret = crypto_register_shash(&wp512);
1154 1163
1155 if (ret < 0) 1164 if (ret < 0)
1156 goto out; 1165 goto out;
1157 1166
1158 ret = crypto_register_alg(&wp384); 1167 ret = crypto_register_shash(&wp384);
1159 if (ret < 0) 1168 if (ret < 0)
1160 { 1169 {
1161 crypto_unregister_alg(&wp512); 1170 crypto_unregister_shash(&wp512);
1162 goto out; 1171 goto out;
1163 } 1172 }
1164 1173
1165 ret = crypto_register_alg(&wp256); 1174 ret = crypto_register_shash(&wp256);
1166 if (ret < 0) 1175 if (ret < 0)
1167 { 1176 {
1168 crypto_unregister_alg(&wp512); 1177 crypto_unregister_shash(&wp512);
1169 crypto_unregister_alg(&wp384); 1178 crypto_unregister_shash(&wp384);
1170 } 1179 }
1171out: 1180out:
1172 return ret; 1181 return ret;
@@ -1174,9 +1183,9 @@ out:
1174 1183
1175static void __exit wp512_mod_fini(void) 1184static void __exit wp512_mod_fini(void)
1176{ 1185{
1177 crypto_unregister_alg(&wp512); 1186 crypto_unregister_shash(&wp512);
1178 crypto_unregister_alg(&wp384); 1187 crypto_unregister_shash(&wp384);
1179 crypto_unregister_alg(&wp256); 1188 crypto_unregister_shash(&wp256);
1180} 1189}
1181 1190
1182MODULE_ALIAS("wp384"); 1191MODULE_ALIAS("wp384");
diff --git a/drivers/crypto/hifn_795x.c b/drivers/crypto/hifn_795x.c
index 4d22b21bd3e3..0c79fe7f1567 100644
--- a/drivers/crypto/hifn_795x.c
+++ b/drivers/crypto/hifn_795x.c
@@ -38,9 +38,6 @@
38 38
39#include <asm/kmap_types.h> 39#include <asm/kmap_types.h>
40 40
41#undef dprintk
42
43#define HIFN_TEST
44//#define HIFN_DEBUG 41//#define HIFN_DEBUG
45 42
46#ifdef HIFN_DEBUG 43#ifdef HIFN_DEBUG
@@ -363,14 +360,14 @@ static atomic_t hifn_dev_number;
363#define HIFN_NAMESIZE 32 360#define HIFN_NAMESIZE 32
364#define HIFN_MAX_RESULT_ORDER 5 361#define HIFN_MAX_RESULT_ORDER 5
365 362
366#define HIFN_D_CMD_RSIZE 24*4 363#define HIFN_D_CMD_RSIZE 24*1
367#define HIFN_D_SRC_RSIZE 80*4 364#define HIFN_D_SRC_RSIZE 80*1
368#define HIFN_D_DST_RSIZE 80*4 365#define HIFN_D_DST_RSIZE 80*1
369#define HIFN_D_RES_RSIZE 24*4 366#define HIFN_D_RES_RSIZE 24*1
370 367
371#define HIFN_D_DST_DALIGN 4 368#define HIFN_D_DST_DALIGN 4
372 369
373#define HIFN_QUEUE_LENGTH HIFN_D_CMD_RSIZE-1 370#define HIFN_QUEUE_LENGTH (HIFN_D_CMD_RSIZE - 1)
374 371
375#define AES_MIN_KEY_SIZE 16 372#define AES_MIN_KEY_SIZE 16
376#define AES_MAX_KEY_SIZE 32 373#define AES_MAX_KEY_SIZE 32
@@ -406,8 +403,6 @@ struct hifn_dma {
406 u8 command_bufs[HIFN_D_CMD_RSIZE][HIFN_MAX_COMMAND]; 403 u8 command_bufs[HIFN_D_CMD_RSIZE][HIFN_MAX_COMMAND];
407 u8 result_bufs[HIFN_D_CMD_RSIZE][HIFN_MAX_RESULT]; 404 u8 result_bufs[HIFN_D_CMD_RSIZE][HIFN_MAX_RESULT];
408 405
409 u64 test_src, test_dst;
410
411 /* 406 /*
412 * Our current positions for insertion and removal from the descriptor 407 * Our current positions for insertion and removal from the descriptor
413 * rings. 408 * rings.
@@ -434,9 +429,6 @@ struct hifn_device
434 struct pci_dev *pdev; 429 struct pci_dev *pdev;
435 void __iomem *bar[3]; 430 void __iomem *bar[3];
436 431
437 unsigned long result_mem;
438 dma_addr_t dst;
439
440 void *desc_virt; 432 void *desc_virt;
441 dma_addr_t desc_dma; 433 dma_addr_t desc_dma;
442 434
@@ -446,8 +438,6 @@ struct hifn_device
446 438
447 spinlock_t lock; 439 spinlock_t lock;
448 440
449 void *priv;
450
451 u32 flags; 441 u32 flags;
452 int active, started; 442 int active, started;
453 struct delayed_work work; 443 struct delayed_work work;
@@ -657,12 +647,17 @@ struct ablkcipher_walk
657 647
658struct hifn_context 648struct hifn_context
659{ 649{
660 u8 key[HIFN_MAX_CRYPT_KEY_LENGTH], *iv; 650 u8 key[HIFN_MAX_CRYPT_KEY_LENGTH];
661 struct hifn_device *dev; 651 struct hifn_device *dev;
662 unsigned int keysize, ivsize; 652 unsigned int keysize;
653};
654
655struct hifn_request_context
656{
657 u8 *iv;
658 unsigned int ivsize;
663 u8 op, type, mode, unused; 659 u8 op, type, mode, unused;
664 struct ablkcipher_walk walk; 660 struct ablkcipher_walk walk;
665 atomic_t sg_num;
666}; 661};
667 662
668#define crypto_alg_to_hifn(a) container_of(a, struct hifn_crypto_alg, alg) 663#define crypto_alg_to_hifn(a) container_of(a, struct hifn_crypto_alg, alg)
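The split above moves everything request-specific (IV, operation, mode, walk state) out of the long-lived hifn_context into a new per-request hifn_request_context, so concurrent requests on one tfm no longer share mutable state; the per-request area is reserved through crt_ablkcipher.reqsize in cra_init (see the hifn_cra_init hunk further down). A minimal sketch of the pattern, with hypothetical my_* names:

#include <linux/crypto.h>

struct my_tfm_ctx {                     /* long-lived, one per transform */
        u8 key[32];
        unsigned int keysize;
};

struct my_req_ctx {                     /* short-lived, one per request  */
        u8 *iv;
        unsigned int ivsize;
        u8 op, mode;
};

static int my_cra_init(struct crypto_tfm *tfm)
{
        /* reserve room for the per-request state inside every request */
        tfm->crt_ablkcipher.reqsize = sizeof(struct my_req_ctx);
        return 0;
}

static int my_encrypt(struct ablkcipher_request *req)
{
        struct my_tfm_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        struct my_req_ctx *rctx = ablkcipher_request_ctx(req);

        rctx->iv = req->info;           /* IV belongs to this request only */
        rctx->op = 1;                   /* e.g. encrypt                     */
        /* ctx->key / ctx->keysize and rctx would be handed to the queue */
        (void)ctx;
        return 0;
}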
@@ -1168,7 +1163,8 @@ static int hifn_setup_crypto_command(struct hifn_device *dev,
1168} 1163}
1169 1164
1170static int hifn_setup_cmd_desc(struct hifn_device *dev, 1165static int hifn_setup_cmd_desc(struct hifn_device *dev,
1171 struct hifn_context *ctx, void *priv, unsigned int nbytes) 1166 struct hifn_context *ctx, struct hifn_request_context *rctx,
1167 void *priv, unsigned int nbytes)
1172{ 1168{
1173 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt; 1169 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
1174 int cmd_len, sa_idx; 1170 int cmd_len, sa_idx;
@@ -1179,7 +1175,7 @@ static int hifn_setup_cmd_desc(struct hifn_device *dev,
1179 buf_pos = buf = dma->command_bufs[dma->cmdi]; 1175 buf_pos = buf = dma->command_bufs[dma->cmdi];
1180 1176
1181 mask = 0; 1177 mask = 0;
1182 switch (ctx->op) { 1178 switch (rctx->op) {
1183 case ACRYPTO_OP_DECRYPT: 1179 case ACRYPTO_OP_DECRYPT:
1184 mask = HIFN_BASE_CMD_CRYPT | HIFN_BASE_CMD_DECODE; 1180 mask = HIFN_BASE_CMD_CRYPT | HIFN_BASE_CMD_DECODE;
1185 break; 1181 break;
@@ -1196,15 +1192,15 @@ static int hifn_setup_cmd_desc(struct hifn_device *dev,
1196 buf_pos += hifn_setup_base_command(dev, buf_pos, nbytes, 1192 buf_pos += hifn_setup_base_command(dev, buf_pos, nbytes,
1197 nbytes, mask, dev->snum); 1193 nbytes, mask, dev->snum);
1198 1194
1199 if (ctx->op == ACRYPTO_OP_ENCRYPT || ctx->op == ACRYPTO_OP_DECRYPT) { 1195 if (rctx->op == ACRYPTO_OP_ENCRYPT || rctx->op == ACRYPTO_OP_DECRYPT) {
1200 u16 md = 0; 1196 u16 md = 0;
1201 1197
1202 if (ctx->keysize) 1198 if (ctx->keysize)
1203 md |= HIFN_CRYPT_CMD_NEW_KEY; 1199 md |= HIFN_CRYPT_CMD_NEW_KEY;
1204 if (ctx->iv && ctx->mode != ACRYPTO_MODE_ECB) 1200 if (rctx->iv && rctx->mode != ACRYPTO_MODE_ECB)
1205 md |= HIFN_CRYPT_CMD_NEW_IV; 1201 md |= HIFN_CRYPT_CMD_NEW_IV;
1206 1202
1207 switch (ctx->mode) { 1203 switch (rctx->mode) {
1208 case ACRYPTO_MODE_ECB: 1204 case ACRYPTO_MODE_ECB:
1209 md |= HIFN_CRYPT_CMD_MODE_ECB; 1205 md |= HIFN_CRYPT_CMD_MODE_ECB;
1210 break; 1206 break;
@@ -1221,7 +1217,7 @@ static int hifn_setup_cmd_desc(struct hifn_device *dev,
1221 goto err_out; 1217 goto err_out;
1222 } 1218 }
1223 1219
1224 switch (ctx->type) { 1220 switch (rctx->type) {
1225 case ACRYPTO_TYPE_AES_128: 1221 case ACRYPTO_TYPE_AES_128:
1226 if (ctx->keysize != 16) 1222 if (ctx->keysize != 16)
1227 goto err_out; 1223 goto err_out;
@@ -1256,17 +1252,18 @@ static int hifn_setup_cmd_desc(struct hifn_device *dev,
1256 1252
1257 buf_pos += hifn_setup_crypto_command(dev, buf_pos, 1253 buf_pos += hifn_setup_crypto_command(dev, buf_pos,
1258 nbytes, nbytes, ctx->key, ctx->keysize, 1254 nbytes, nbytes, ctx->key, ctx->keysize,
1259 ctx->iv, ctx->ivsize, md); 1255 rctx->iv, rctx->ivsize, md);
1260 } 1256 }
1261 1257
1262 dev->sa[sa_idx] = priv; 1258 dev->sa[sa_idx] = priv;
1259 dev->started++;
1263 1260
1264 cmd_len = buf_pos - buf; 1261 cmd_len = buf_pos - buf;
1265 dma->cmdr[dma->cmdi].l = __cpu_to_le32(cmd_len | HIFN_D_VALID | 1262 dma->cmdr[dma->cmdi].l = __cpu_to_le32(cmd_len | HIFN_D_VALID |
1266 HIFN_D_LAST | HIFN_D_MASKDONEIRQ); 1263 HIFN_D_LAST | HIFN_D_MASKDONEIRQ);
1267 1264
1268 if (++dma->cmdi == HIFN_D_CMD_RSIZE) { 1265 if (++dma->cmdi == HIFN_D_CMD_RSIZE) {
1269 dma->cmdr[dma->cmdi].l = __cpu_to_le32(HIFN_MAX_COMMAND | 1266 dma->cmdr[dma->cmdi].l = __cpu_to_le32(
1270 HIFN_D_VALID | HIFN_D_LAST | 1267 HIFN_D_VALID | HIFN_D_LAST |
1271 HIFN_D_MASKDONEIRQ | HIFN_D_JUMP); 1268 HIFN_D_MASKDONEIRQ | HIFN_D_JUMP);
1272 dma->cmdi = 0; 1269 dma->cmdi = 0;
@@ -1284,7 +1281,7 @@ err_out:
1284} 1281}
1285 1282
1286static int hifn_setup_src_desc(struct hifn_device *dev, struct page *page, 1283static int hifn_setup_src_desc(struct hifn_device *dev, struct page *page,
1287 unsigned int offset, unsigned int size) 1284 unsigned int offset, unsigned int size, int last)
1288{ 1285{
1289 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt; 1286 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
1290 int idx; 1287 int idx;
@@ -1296,12 +1293,12 @@ static int hifn_setup_src_desc(struct hifn_device *dev, struct page *page,
1296 1293
1297 dma->srcr[idx].p = __cpu_to_le32(addr); 1294 dma->srcr[idx].p = __cpu_to_le32(addr);
1298 dma->srcr[idx].l = __cpu_to_le32(size | HIFN_D_VALID | 1295 dma->srcr[idx].l = __cpu_to_le32(size | HIFN_D_VALID |
1299 HIFN_D_MASKDONEIRQ | HIFN_D_LAST); 1296 HIFN_D_MASKDONEIRQ | (last ? HIFN_D_LAST : 0));
1300 1297
1301 if (++idx == HIFN_D_SRC_RSIZE) { 1298 if (++idx == HIFN_D_SRC_RSIZE) {
1302 dma->srcr[idx].l = __cpu_to_le32(HIFN_D_VALID | 1299 dma->srcr[idx].l = __cpu_to_le32(HIFN_D_VALID |
1303 HIFN_D_JUMP | 1300 HIFN_D_JUMP | HIFN_D_MASKDONEIRQ |
1304 HIFN_D_MASKDONEIRQ | HIFN_D_LAST); 1301 (last ? HIFN_D_LAST : 0));
1305 idx = 0; 1302 idx = 0;
1306 } 1303 }
1307 1304
@@ -1342,7 +1339,7 @@ static void hifn_setup_res_desc(struct hifn_device *dev)
1342} 1339}
1343 1340
1344static void hifn_setup_dst_desc(struct hifn_device *dev, struct page *page, 1341static void hifn_setup_dst_desc(struct hifn_device *dev, struct page *page,
1345 unsigned offset, unsigned size) 1342 unsigned offset, unsigned size, int last)
1346{ 1343{
1347 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt; 1344 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
1348 int idx; 1345 int idx;
@@ -1353,12 +1350,12 @@ static void hifn_setup_dst_desc(struct hifn_device *dev, struct page *page,
1353 idx = dma->dsti; 1350 idx = dma->dsti;
1354 dma->dstr[idx].p = __cpu_to_le32(addr); 1351 dma->dstr[idx].p = __cpu_to_le32(addr);
1355 dma->dstr[idx].l = __cpu_to_le32(size | HIFN_D_VALID | 1352 dma->dstr[idx].l = __cpu_to_le32(size | HIFN_D_VALID |
1356 HIFN_D_MASKDONEIRQ | HIFN_D_LAST); 1353 HIFN_D_MASKDONEIRQ | (last ? HIFN_D_LAST : 0));
1357 1354
1358 if (++idx == HIFN_D_DST_RSIZE) { 1355 if (++idx == HIFN_D_DST_RSIZE) {
1359 dma->dstr[idx].l = __cpu_to_le32(HIFN_D_VALID | 1356 dma->dstr[idx].l = __cpu_to_le32(HIFN_D_VALID |
1360 HIFN_D_JUMP | HIFN_D_MASKDONEIRQ | 1357 HIFN_D_JUMP | HIFN_D_MASKDONEIRQ |
1361 HIFN_D_LAST); 1358 (last ? HIFN_D_LAST : 0));
1362 idx = 0; 1359 idx = 0;
1363 } 1360 }
1364 dma->dsti = idx; 1361 dma->dsti = idx;
@@ -1370,16 +1367,52 @@ static void hifn_setup_dst_desc(struct hifn_device *dev, struct page *page,
1370 } 1367 }
1371} 1368}
1372 1369
1373static int hifn_setup_dma(struct hifn_device *dev, struct page *spage, unsigned int soff, 1370static int hifn_setup_dma(struct hifn_device *dev,
1374 struct page *dpage, unsigned int doff, unsigned int nbytes, void *priv, 1371 struct hifn_context *ctx, struct hifn_request_context *rctx,
1375 struct hifn_context *ctx) 1372 struct scatterlist *src, struct scatterlist *dst,
1373 unsigned int nbytes, void *priv)
1376{ 1374{
1377 dprintk("%s: spage: %p, soffset: %u, dpage: %p, doffset: %u, nbytes: %u, priv: %p, ctx: %p.\n", 1375 struct scatterlist *t;
1378 dev->name, spage, soff, dpage, doff, nbytes, priv, ctx); 1376 struct page *spage, *dpage;
1377 unsigned int soff, doff;
1378 unsigned int n, len;
1379 1379
1380 hifn_setup_src_desc(dev, spage, soff, nbytes); 1380 n = nbytes;
1381 hifn_setup_cmd_desc(dev, ctx, priv, nbytes); 1381 while (n) {
1382 hifn_setup_dst_desc(dev, dpage, doff, nbytes); 1382 spage = sg_page(src);
1383 soff = src->offset;
1384 len = min(src->length, n);
1385
1386 hifn_setup_src_desc(dev, spage, soff, len, n - len == 0);
1387
1388 src++;
1389 n -= len;
1390 }
1391
1392 t = &rctx->walk.cache[0];
1393 n = nbytes;
1394 while (n) {
1395 if (t->length && rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {
1396 BUG_ON(!sg_page(t));
1397 dpage = sg_page(t);
1398 doff = 0;
1399 len = t->length;
1400 } else {
1401 BUG_ON(!sg_page(dst));
1402 dpage = sg_page(dst);
1403 doff = dst->offset;
1404 len = dst->length;
1405 }
1406 len = min(len, n);
1407
1408 hifn_setup_dst_desc(dev, dpage, doff, len, n - len == 0);
1409
1410 dst++;
1411 t++;
1412 n -= len;
1413 }
1414
1415 hifn_setup_cmd_desc(dev, ctx, rctx, priv, nbytes);
1383 hifn_setup_res_desc(dev); 1416 hifn_setup_res_desc(dev);
1384 return 0; 1417 return 0;
1385} 1418}
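hifn_setup_dma() now walks the source and destination scatterlists itself, emitting one hardware descriptor per segment and setting HIFN_D_LAST only on the final one, with misaligned destinations redirected through the walk cache. A hedged sketch of the per-segment source loop (emit_src_descriptors and its emit callback are illustrative; a flat sg array is assumed, as in this driver):

#include <linux/kernel.h>
#include <linux/scatterlist.h>

/* Emit one descriptor per scatterlist segment, flagging the last one so
 * the engine knows where the buffer ends. */
static void emit_src_descriptors(struct scatterlist *src, unsigned int nbytes,
                                 void (*emit)(struct page *p, unsigned int off,
                                              unsigned int len, int last))
{
        unsigned int n = nbytes, len;

        while (n) {
                len = min(src->length, n);
                emit(sg_page(src), src->offset, len, n - len == 0);
                src++;                  /* flat sg array, as in hifn_setup_dma */
                n -= len;
        }
}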
@@ -1424,32 +1457,26 @@ static void ablkcipher_walk_exit(struct ablkcipher_walk *w)
1424 w->num = 0; 1457 w->num = 0;
1425} 1458}
1426 1459
1427static int ablkcipher_add(void *daddr, unsigned int *drestp, struct scatterlist *src, 1460static int ablkcipher_add(unsigned int *drestp, struct scatterlist *dst,
1428 unsigned int size, unsigned int *nbytesp) 1461 unsigned int size, unsigned int *nbytesp)
1429{ 1462{
1430 unsigned int copy, drest = *drestp, nbytes = *nbytesp; 1463 unsigned int copy, drest = *drestp, nbytes = *nbytesp;
1431 int idx = 0; 1464 int idx = 0;
1432 void *saddr;
1433 1465
1434 if (drest < size || size > nbytes) 1466 if (drest < size || size > nbytes)
1435 return -EINVAL; 1467 return -EINVAL;
1436 1468
1437 while (size) { 1469 while (size) {
1438 copy = min(drest, min(size, src->length)); 1470 copy = min(drest, min(size, dst->length));
1439
1440 saddr = kmap_atomic(sg_page(src), KM_SOFTIRQ1);
1441 memcpy(daddr, saddr + src->offset, copy);
1442 kunmap_atomic(saddr, KM_SOFTIRQ1);
1443 1471
1444 size -= copy; 1472 size -= copy;
1445 drest -= copy; 1473 drest -= copy;
1446 nbytes -= copy; 1474 nbytes -= copy;
1447 daddr += copy;
1448 1475
1449 dprintk("%s: copy: %u, size: %u, drest: %u, nbytes: %u.\n", 1476 dprintk("%s: copy: %u, size: %u, drest: %u, nbytes: %u.\n",
1450 __func__, copy, size, drest, nbytes); 1477 __func__, copy, size, drest, nbytes);
1451 1478
1452 src++; 1479 dst++;
1453 idx++; 1480 idx++;
1454 } 1481 }
1455 1482
@@ -1462,8 +1489,7 @@ static int ablkcipher_add(void *daddr, unsigned int *drestp, struct scatterlist
1462static int ablkcipher_walk(struct ablkcipher_request *req, 1489static int ablkcipher_walk(struct ablkcipher_request *req,
1463 struct ablkcipher_walk *w) 1490 struct ablkcipher_walk *w)
1464{ 1491{
1465 struct scatterlist *src, *dst, *t; 1492 struct scatterlist *dst, *t;
1466 void *daddr;
1467 unsigned int nbytes = req->nbytes, offset, copy, diff; 1493 unsigned int nbytes = req->nbytes, offset, copy, diff;
1468 int idx, tidx, err; 1494 int idx, tidx, err;
1469 1495
@@ -1473,26 +1499,22 @@ static int ablkcipher_walk(struct ablkcipher_request *req,
1473 if (idx >= w->num && (w->flags & ASYNC_FLAGS_MISALIGNED)) 1499 if (idx >= w->num && (w->flags & ASYNC_FLAGS_MISALIGNED))
1474 return -EINVAL; 1500 return -EINVAL;
1475 1501
1476 src = &req->src[idx];
1477 dst = &req->dst[idx]; 1502 dst = &req->dst[idx];
1478 1503
1479 dprintk("\n%s: slen: %u, dlen: %u, soff: %u, doff: %u, offset: %u, " 1504 dprintk("\n%s: dlen: %u, doff: %u, offset: %u, nbytes: %u.\n",
1480 "nbytes: %u.\n", 1505 __func__, dst->length, dst->offset, offset, nbytes);
1481 __func__, src->length, dst->length, src->offset,
1482 dst->offset, offset, nbytes);
1483 1506
1484 if (!IS_ALIGNED(dst->offset, HIFN_D_DST_DALIGN) || 1507 if (!IS_ALIGNED(dst->offset, HIFN_D_DST_DALIGN) ||
1485 !IS_ALIGNED(dst->length, HIFN_D_DST_DALIGN) || 1508 !IS_ALIGNED(dst->length, HIFN_D_DST_DALIGN) ||
1486 offset) { 1509 offset) {
1487 unsigned slen = min(src->length - offset, nbytes); 1510 unsigned slen = min(dst->length - offset, nbytes);
1488 unsigned dlen = PAGE_SIZE; 1511 unsigned dlen = PAGE_SIZE;
1489 1512
1490 t = &w->cache[idx]; 1513 t = &w->cache[idx];
1491 1514
1492 daddr = kmap_atomic(sg_page(t), KM_SOFTIRQ0); 1515 err = ablkcipher_add(&dlen, dst, slen, &nbytes);
1493 err = ablkcipher_add(daddr, &dlen, src, slen, &nbytes);
1494 if (err < 0) 1516 if (err < 0)
1495 goto err_out_unmap; 1517 return err;
1496 1518
1497 idx += err; 1519 idx += err;
1498 1520
@@ -1528,21 +1550,19 @@ static int ablkcipher_walk(struct ablkcipher_request *req,
1528 } else { 1550 } else {
1529 copy += diff + nbytes; 1551 copy += diff + nbytes;
1530 1552
1531 src = &req->src[idx]; 1553 dst = &req->dst[idx];
1532 1554
1533 err = ablkcipher_add(daddr + slen, &dlen, src, nbytes, &nbytes); 1555 err = ablkcipher_add(&dlen, dst, nbytes, &nbytes);
1534 if (err < 0) 1556 if (err < 0)
1535 goto err_out_unmap; 1557 return err;
1536 1558
1537 idx += err; 1559 idx += err;
1538 } 1560 }
1539 1561
1540 t->length = copy; 1562 t->length = copy;
1541 t->offset = offset; 1563 t->offset = offset;
1542
1543 kunmap_atomic(daddr, KM_SOFTIRQ0);
1544 } else { 1564 } else {
1545 nbytes -= min(src->length, nbytes); 1565 nbytes -= min(dst->length, nbytes);
1546 idx++; 1566 idx++;
1547 } 1567 }
1548 1568
@@ -1550,26 +1570,22 @@ static int ablkcipher_walk(struct ablkcipher_request *req,
1550 } 1570 }
1551 1571
1552 return tidx; 1572 return tidx;
1553
1554err_out_unmap:
1555 kunmap_atomic(daddr, KM_SOFTIRQ0);
1556 return err;
1557} 1573}
1558 1574
1559static int hifn_setup_session(struct ablkcipher_request *req) 1575static int hifn_setup_session(struct ablkcipher_request *req)
1560{ 1576{
1561 struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm); 1577 struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm);
1578 struct hifn_request_context *rctx = ablkcipher_request_ctx(req);
1562 struct hifn_device *dev = ctx->dev; 1579 struct hifn_device *dev = ctx->dev;
1563 struct page *spage, *dpage; 1580 unsigned long dlen, flags;
1564 unsigned long soff, doff, dlen, flags; 1581 unsigned int nbytes = req->nbytes, idx = 0;
1565 unsigned int nbytes = req->nbytes, idx = 0, len;
1566 int err = -EINVAL, sg_num; 1582 int err = -EINVAL, sg_num;
1567 struct scatterlist *src, *dst, *t; 1583 struct scatterlist *dst;
1568 1584
1569 if (ctx->iv && !ctx->ivsize && ctx->mode != ACRYPTO_MODE_ECB) 1585 if (rctx->iv && !rctx->ivsize && rctx->mode != ACRYPTO_MODE_ECB)
1570 goto err_out_exit; 1586 goto err_out_exit;
1571 1587
1572 ctx->walk.flags = 0; 1588 rctx->walk.flags = 0;
1573 1589
1574 while (nbytes) { 1590 while (nbytes) {
1575 dst = &req->dst[idx]; 1591 dst = &req->dst[idx];
@@ -1577,27 +1593,23 @@ static int hifn_setup_session(struct ablkcipher_request *req)
1577 1593
1578 if (!IS_ALIGNED(dst->offset, HIFN_D_DST_DALIGN) || 1594 if (!IS_ALIGNED(dst->offset, HIFN_D_DST_DALIGN) ||
1579 !IS_ALIGNED(dlen, HIFN_D_DST_DALIGN)) 1595 !IS_ALIGNED(dlen, HIFN_D_DST_DALIGN))
1580 ctx->walk.flags |= ASYNC_FLAGS_MISALIGNED; 1596 rctx->walk.flags |= ASYNC_FLAGS_MISALIGNED;
1581 1597
1582 nbytes -= dlen; 1598 nbytes -= dlen;
1583 idx++; 1599 idx++;
1584 } 1600 }
1585 1601
1586 if (ctx->walk.flags & ASYNC_FLAGS_MISALIGNED) { 1602 if (rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {
1587 err = ablkcipher_walk_init(&ctx->walk, idx, GFP_ATOMIC); 1603 err = ablkcipher_walk_init(&rctx->walk, idx, GFP_ATOMIC);
1588 if (err < 0) 1604 if (err < 0)
1589 return err; 1605 return err;
1590 } 1606 }
1591 1607
1592 nbytes = req->nbytes; 1608 sg_num = ablkcipher_walk(req, &rctx->walk);
1593 idx = 0;
1594
1595 sg_num = ablkcipher_walk(req, &ctx->walk);
1596 if (sg_num < 0) { 1609 if (sg_num < 0) {
1597 err = sg_num; 1610 err = sg_num;
1598 goto err_out_exit; 1611 goto err_out_exit;
1599 } 1612 }
1600 atomic_set(&ctx->sg_num, sg_num);
1601 1613
1602 spin_lock_irqsave(&dev->lock, flags); 1614 spin_lock_irqsave(&dev->lock, flags);
1603 if (dev->started + sg_num > HIFN_QUEUE_LENGTH) { 1615 if (dev->started + sg_num > HIFN_QUEUE_LENGTH) {
@@ -1605,37 +1617,11 @@ static int hifn_setup_session(struct ablkcipher_request *req)
1605 goto err_out; 1617 goto err_out;
1606 } 1618 }
1607 1619
1608 dev->snum++; 1620 err = hifn_setup_dma(dev, ctx, rctx, req->src, req->dst, req->nbytes, req);
1609 dev->started += sg_num; 1621 if (err)
1610 1622 goto err_out;
1611 while (nbytes) {
1612 src = &req->src[idx];
1613 dst = &req->dst[idx];
1614 t = &ctx->walk.cache[idx];
1615
1616 if (t->length) {
1617 spage = dpage = sg_page(t);
1618 soff = doff = 0;
1619 len = t->length;
1620 } else {
1621 spage = sg_page(src);
1622 soff = src->offset;
1623
1624 dpage = sg_page(dst);
1625 doff = dst->offset;
1626
1627 len = dst->length;
1628 }
1629
1630 idx++;
1631
1632 err = hifn_setup_dma(dev, spage, soff, dpage, doff, nbytes,
1633 req, ctx);
1634 if (err)
1635 goto err_out;
1636 1623
1637 nbytes -= min(len, nbytes); 1624 dev->snum++;
1638 }
1639 1625
1640 dev->active = HIFN_DEFAULT_ACTIVE_NUM; 1626 dev->active = HIFN_DEFAULT_ACTIVE_NUM;
1641 spin_unlock_irqrestore(&dev->lock, flags); 1627 spin_unlock_irqrestore(&dev->lock, flags);
@@ -1645,12 +1631,13 @@ static int hifn_setup_session(struct ablkcipher_request *req)
1645err_out: 1631err_out:
1646 spin_unlock_irqrestore(&dev->lock, flags); 1632 spin_unlock_irqrestore(&dev->lock, flags);
1647err_out_exit: 1633err_out_exit:
1648 if (err) 1634 if (err) {
1649 dprintk("%s: iv: %p [%d], key: %p [%d], mode: %u, op: %u, " 1635 printk("%s: iv: %p [%d], key: %p [%d], mode: %u, op: %u, "
1650 "type: %u, err: %d.\n", 1636 "type: %u, err: %d.\n",
1651 dev->name, ctx->iv, ctx->ivsize, 1637 dev->name, rctx->iv, rctx->ivsize,
1652 ctx->key, ctx->keysize, 1638 ctx->key, ctx->keysize,
1653 ctx->mode, ctx->op, ctx->type, err); 1639 rctx->mode, rctx->op, rctx->type, err);
1640 }
1654 1641
1655 return err; 1642 return err;
1656} 1643}
@@ -1660,31 +1647,33 @@ static int hifn_test(struct hifn_device *dev, int encdec, u8 snum)
1660 int n, err; 1647 int n, err;
1661 u8 src[16]; 1648 u8 src[16];
1662 struct hifn_context ctx; 1649 struct hifn_context ctx;
1650 struct hifn_request_context rctx;
1663 u8 fips_aes_ecb_from_zero[16] = { 1651 u8 fips_aes_ecb_from_zero[16] = {
1664 0x66, 0xE9, 0x4B, 0xD4, 1652 0x66, 0xE9, 0x4B, 0xD4,
1665 0xEF, 0x8A, 0x2C, 0x3B, 1653 0xEF, 0x8A, 0x2C, 0x3B,
1666 0x88, 0x4C, 0xFA, 0x59, 1654 0x88, 0x4C, 0xFA, 0x59,
1667 0xCA, 0x34, 0x2B, 0x2E}; 1655 0xCA, 0x34, 0x2B, 0x2E};
1656 struct scatterlist sg;
1668 1657
1669 memset(src, 0, sizeof(src)); 1658 memset(src, 0, sizeof(src));
1670 memset(ctx.key, 0, sizeof(ctx.key)); 1659 memset(ctx.key, 0, sizeof(ctx.key));
1671 1660
1672 ctx.dev = dev; 1661 ctx.dev = dev;
1673 ctx.keysize = 16; 1662 ctx.keysize = 16;
1674 ctx.ivsize = 0; 1663 rctx.ivsize = 0;
1675 ctx.iv = NULL; 1664 rctx.iv = NULL;
1676 ctx.op = (encdec)?ACRYPTO_OP_ENCRYPT:ACRYPTO_OP_DECRYPT; 1665 rctx.op = (encdec)?ACRYPTO_OP_ENCRYPT:ACRYPTO_OP_DECRYPT;
1677 ctx.mode = ACRYPTO_MODE_ECB; 1666 rctx.mode = ACRYPTO_MODE_ECB;
1678 ctx.type = ACRYPTO_TYPE_AES_128; 1667 rctx.type = ACRYPTO_TYPE_AES_128;
1679 atomic_set(&ctx.sg_num, 1); 1668 rctx.walk.cache[0].length = 0;
1680 1669
1681 err = hifn_setup_dma(dev, 1670 sg_init_one(&sg, &src, sizeof(src));
1682 virt_to_page(src), offset_in_page(src), 1671
1683 virt_to_page(src), offset_in_page(src), 1672 err = hifn_setup_dma(dev, &ctx, &rctx, &sg, &sg, sizeof(src), NULL);
1684 sizeof(src), NULL, &ctx);
1685 if (err) 1673 if (err)
1686 goto err_out; 1674 goto err_out;
1687 1675
1676 dev->started = 0;
1688 msleep(200); 1677 msleep(200);
1689 1678
1690 dprintk("%s: decoded: ", dev->name); 1679 dprintk("%s: decoded: ", dev->name);
@@ -1711,6 +1700,7 @@ static int hifn_start_device(struct hifn_device *dev)
1711{ 1700{
1712 int err; 1701 int err;
1713 1702
1703 dev->started = dev->active = 0;
1714 hifn_reset_dma(dev, 1); 1704 hifn_reset_dma(dev, 1);
1715 1705
1716 err = hifn_enable_crypto(dev); 1706 err = hifn_enable_crypto(dev);
@@ -1764,90 +1754,65 @@ static int ablkcipher_get(void *saddr, unsigned int *srestp, unsigned int offset
1764 return idx; 1754 return idx;
1765} 1755}
1766 1756
1767static void hifn_process_ready(struct ablkcipher_request *req, int error) 1757static inline void hifn_complete_sa(struct hifn_device *dev, int i)
1768{ 1758{
1769 struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm); 1759 unsigned long flags;
1770 struct hifn_device *dev;
1771
1772 dprintk("%s: req: %p, ctx: %p.\n", __func__, req, ctx);
1773 1760
1774 dev = ctx->dev; 1761 spin_lock_irqsave(&dev->lock, flags);
1775 dprintk("%s: req: %p, started: %d, sg_num: %d.\n", 1762 dev->sa[i] = NULL;
1776 __func__, req, dev->started, atomic_read(&ctx->sg_num)); 1763 dev->started--;
1764 if (dev->started < 0)
1765 printk("%s: started: %d.\n", __func__, dev->started);
1766 spin_unlock_irqrestore(&dev->lock, flags);
1767 BUG_ON(dev->started < 0);
1768}
1777 1769
1778 if (--dev->started < 0) 1770static void hifn_process_ready(struct ablkcipher_request *req, int error)
1779 BUG(); 1771{
1772 struct hifn_request_context *rctx = ablkcipher_request_ctx(req);
1780 1773
1781 if (atomic_dec_and_test(&ctx->sg_num)) { 1774 if (rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {
1782 unsigned int nbytes = req->nbytes; 1775 unsigned int nbytes = req->nbytes;
1783 int idx = 0, err; 1776 int idx = 0, err;
1784 struct scatterlist *dst, *t; 1777 struct scatterlist *dst, *t;
1785 void *saddr; 1778 void *saddr;
1786 1779
1787 if (ctx->walk.flags & ASYNC_FLAGS_MISALIGNED) { 1780 while (nbytes) {
1788 while (nbytes) { 1781 t = &rctx->walk.cache[idx];
1789 t = &ctx->walk.cache[idx]; 1782 dst = &req->dst[idx];
1790 dst = &req->dst[idx];
1791
1792 dprintk("\n%s: sg_page(t): %p, t->length: %u, "
1793 "sg_page(dst): %p, dst->length: %u, "
1794 "nbytes: %u.\n",
1795 __func__, sg_page(t), t->length,
1796 sg_page(dst), dst->length, nbytes);
1797 1783
1798 if (!t->length) { 1784 dprintk("\n%s: sg_page(t): %p, t->length: %u, "
1799 nbytes -= min(dst->length, nbytes); 1785 "sg_page(dst): %p, dst->length: %u, "
1800 idx++; 1786 "nbytes: %u.\n",
1801 continue; 1787 __func__, sg_page(t), t->length,
1802 } 1788 sg_page(dst), dst->length, nbytes);
1803 1789
1804 saddr = kmap_atomic(sg_page(t), KM_IRQ1); 1790 if (!t->length) {
1791 nbytes -= min(dst->length, nbytes);
1792 idx++;
1793 continue;
1794 }
1805 1795
1806 err = ablkcipher_get(saddr, &t->length, t->offset, 1796 saddr = kmap_atomic(sg_page(t), KM_SOFTIRQ0);
1807 dst, nbytes, &nbytes);
1808 if (err < 0) {
1809 kunmap_atomic(saddr, KM_IRQ1);
1810 break;
1811 }
1812 1797
1813 idx += err; 1798 err = ablkcipher_get(saddr, &t->length, t->offset,
1814 kunmap_atomic(saddr, KM_IRQ1); 1799 dst, nbytes, &nbytes);
1800 if (err < 0) {
1801 kunmap_atomic(saddr, KM_SOFTIRQ0);
1802 break;
1815 } 1803 }
1816 1804
1817 ablkcipher_walk_exit(&ctx->walk); 1805 idx += err;
1806 kunmap_atomic(saddr, KM_SOFTIRQ0);
1818 } 1807 }
1819 1808
1820 req->base.complete(&req->base, error); 1809 ablkcipher_walk_exit(&rctx->walk);
1821 } 1810 }
1822}
1823 1811
1824static void hifn_check_for_completion(struct hifn_device *dev, int error) 1812 req->base.complete(&req->base, error);
1825{
1826 int i;
1827 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
1828
1829 for (i=0; i<HIFN_D_RES_RSIZE; ++i) {
1830 struct hifn_desc *d = &dma->resr[i];
1831
1832 if (!(d->l & __cpu_to_le32(HIFN_D_VALID)) && dev->sa[i]) {
1833 dev->success++;
1834 dev->reset = 0;
1835 hifn_process_ready(dev->sa[i], error);
1836 dev->sa[i] = NULL;
1837 }
1838
1839 if (d->l & __cpu_to_le32(HIFN_D_DESTOVER | HIFN_D_OVER))
1840 if (printk_ratelimit())
1841 printk("%s: overflow detected [d: %u, o: %u] "
1842 "at %d resr: l: %08x, p: %08x.\n",
1843 dev->name,
1844 !!(d->l & __cpu_to_le32(HIFN_D_DESTOVER)),
1845 !!(d->l & __cpu_to_le32(HIFN_D_OVER)),
1846 i, d->l, d->p);
1847 }
1848} 1813}
1849 1814
1850static void hifn_clear_rings(struct hifn_device *dev) 1815static void hifn_clear_rings(struct hifn_device *dev, int error)
1851{ 1816{
1852 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt; 1817 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
1853 int i, u; 1818 int i, u;
@@ -1864,21 +1829,26 @@ static void hifn_clear_rings(struct hifn_device *dev)
1864 if (dma->resr[i].l & __cpu_to_le32(HIFN_D_VALID)) 1829 if (dma->resr[i].l & __cpu_to_le32(HIFN_D_VALID))
1865 break; 1830 break;
1866 1831
1867 if (i != HIFN_D_RES_RSIZE) 1832 if (dev->sa[i]) {
1868 u--; 1833 dev->success++;
1834 dev->reset = 0;
1835 hifn_process_ready(dev->sa[i], error);
1836 hifn_complete_sa(dev, i);
1837 }
1869 1838
1870 if (++i == (HIFN_D_RES_RSIZE + 1)) 1839 if (++i == HIFN_D_RES_RSIZE)
1871 i = 0; 1840 i = 0;
1841 u--;
1872 } 1842 }
1873 dma->resk = i; dma->resu = u; 1843 dma->resk = i; dma->resu = u;
1874 1844
1875 i = dma->srck; u = dma->srcu; 1845 i = dma->srck; u = dma->srcu;
1876 while (u != 0) { 1846 while (u != 0) {
1877 if (i == HIFN_D_SRC_RSIZE)
1878 i = 0;
1879 if (dma->srcr[i].l & __cpu_to_le32(HIFN_D_VALID)) 1847 if (dma->srcr[i].l & __cpu_to_le32(HIFN_D_VALID))
1880 break; 1848 break;
1881 i++, u--; 1849 if (++i == HIFN_D_SRC_RSIZE)
1850 i = 0;
1851 u--;
1882 } 1852 }
1883 dma->srck = i; dma->srcu = u; 1853 dma->srck = i; dma->srcu = u;
1884 1854
@@ -1886,20 +1856,19 @@ static void hifn_clear_rings(struct hifn_device *dev)
1886 while (u != 0) { 1856 while (u != 0) {
1887 if (dma->cmdr[i].l & __cpu_to_le32(HIFN_D_VALID)) 1857 if (dma->cmdr[i].l & __cpu_to_le32(HIFN_D_VALID))
1888 break; 1858 break;
1889 if (i != HIFN_D_CMD_RSIZE) 1859 if (++i == HIFN_D_CMD_RSIZE)
1890 u--;
1891 if (++i == (HIFN_D_CMD_RSIZE + 1))
1892 i = 0; 1860 i = 0;
1861 u--;
1893 } 1862 }
1894 dma->cmdk = i; dma->cmdu = u; 1863 dma->cmdk = i; dma->cmdu = u;
1895 1864
1896 i = dma->dstk; u = dma->dstu; 1865 i = dma->dstk; u = dma->dstu;
1897 while (u != 0) { 1866 while (u != 0) {
1898 if (i == HIFN_D_DST_RSIZE)
1899 i = 0;
1900 if (dma->dstr[i].l & __cpu_to_le32(HIFN_D_VALID)) 1867 if (dma->dstr[i].l & __cpu_to_le32(HIFN_D_VALID))
1901 break; 1868 break;
1902 i++, u--; 1869 if (++i == HIFN_D_DST_RSIZE)
1870 i = 0;
1871 u--;
1903 } 1872 }
1904 dma->dstk = i; dma->dstu = u; 1873 dma->dstk = i; dma->dstu = u;
1905 1874
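The ring-cleanup rework above fixes the consumer loops: the old code tested the index against RSIZE (or RSIZE + 1) before advancing, so the in-use counter could be decremented on the iteration that merely wrapped the index. The corrected shape advances and wraps first, then consumes exactly one slot. A minimal sketch of that consumer loop over a hypothetical ring (my_desc, MY_D_VALID and reclaim_ring are illustrative):

#include <linux/types.h>
#include <asm/byteorder.h>

#define MY_D_VALID      0x80000000      /* descriptor still owned by hardware */

struct my_desc {
        volatile __le32 l;
        volatile __le32 p;
};

/* k = next slot to reclaim, u = number of slots currently in use */
static void reclaim_ring(struct my_desc *ring, unsigned int size,
                         unsigned int *k, unsigned int *u)
{
        unsigned int i = *k, used = *u;

        while (used != 0) {
                if (ring[i].l & __cpu_to_le32(MY_D_VALID))
                        break;          /* hardware has not finished this one */
                if (++i == size)
                        i = 0;          /* wrap before consuming the slot     */
                used--;
        }
        *k = i;
        *u = used;
}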
@@ -1944,30 +1913,39 @@ static void hifn_work(struct work_struct *work)
1944 } else 1913 } else
1945 dev->active--; 1914 dev->active--;
1946 1915
1947 if (dev->prev_success == dev->success && dev->started) 1916 if ((dev->prev_success == dev->success) && dev->started)
1948 reset = 1; 1917 reset = 1;
1949 dev->prev_success = dev->success; 1918 dev->prev_success = dev->success;
1950 spin_unlock_irqrestore(&dev->lock, flags); 1919 spin_unlock_irqrestore(&dev->lock, flags);
1951 1920
1952 if (reset) { 1921 if (reset) {
1953 dprintk("%s: r: %08x, active: %d, started: %d, "
1954 "success: %lu: reset: %d.\n",
1955 dev->name, r, dev->active, dev->started,
1956 dev->success, reset);
1957
1958 if (++dev->reset >= 5) { 1922 if (++dev->reset >= 5) {
1959 dprintk("%s: really hard reset.\n", dev->name); 1923 int i;
1924 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
1925
1926 printk("%s: r: %08x, active: %d, started: %d, "
1927 "success: %lu: qlen: %u/%u, reset: %d.\n",
1928 dev->name, r, dev->active, dev->started,
1929 dev->success, dev->queue.qlen, dev->queue.max_qlen,
1930 reset);
1931
1932 printk("%s: res: ", __func__);
1933 for (i=0; i<HIFN_D_RES_RSIZE; ++i) {
1934 printk("%x.%p ", dma->resr[i].l, dev->sa[i]);
1935 if (dev->sa[i]) {
1936 hifn_process_ready(dev->sa[i], -ENODEV);
1937 hifn_complete_sa(dev, i);
1938 }
1939 }
1940 printk("\n");
1941
1960 hifn_reset_dma(dev, 1); 1942 hifn_reset_dma(dev, 1);
1961 hifn_stop_device(dev); 1943 hifn_stop_device(dev);
1962 hifn_start_device(dev); 1944 hifn_start_device(dev);
1963 dev->reset = 0; 1945 dev->reset = 0;
1964 } 1946 }
1965 1947
1966 spin_lock_irqsave(&dev->lock, flags); 1948 tasklet_schedule(&dev->tasklet);
1967 hifn_check_for_completion(dev, -EBUSY);
1968 hifn_clear_rings(dev);
1969 dev->started = 0;
1970 spin_unlock_irqrestore(&dev->lock, flags);
1971 } 1949 }
1972 1950
1973 schedule_delayed_work(&dev->work, HZ); 1951 schedule_delayed_work(&dev->work, HZ);
@@ -1984,8 +1962,8 @@ static irqreturn_t hifn_interrupt(int irq, void *data)
1984 dprintk("%s: 1 dmacsr: %08x, dmareg: %08x, res: %08x [%d], " 1962 dprintk("%s: 1 dmacsr: %08x, dmareg: %08x, res: %08x [%d], "
1985 "i: %d.%d.%d.%d, u: %d.%d.%d.%d.\n", 1963 "i: %d.%d.%d.%d, u: %d.%d.%d.%d.\n",
1986 dev->name, dmacsr, dev->dmareg, dmacsr & dev->dmareg, dma->cmdi, 1964 dev->name, dmacsr, dev->dmareg, dmacsr & dev->dmareg, dma->cmdi,
1987 dma->cmdu, dma->srcu, dma->dstu, dma->resu, 1965 dma->cmdi, dma->srci, dma->dsti, dma->resi,
1988 dma->cmdi, dma->srci, dma->dsti, dma->resi); 1966 dma->cmdu, dma->srcu, dma->dstu, dma->resu);
1989 1967
1990 if ((dmacsr & dev->dmareg) == 0) 1968 if ((dmacsr & dev->dmareg) == 0)
1991 return IRQ_NONE; 1969 return IRQ_NONE;
@@ -2002,11 +1980,10 @@ static irqreturn_t hifn_interrupt(int irq, void *data)
2002 if (restart) { 1980 if (restart) {
2003 u32 puisr = hifn_read_0(dev, HIFN_0_PUISR); 1981 u32 puisr = hifn_read_0(dev, HIFN_0_PUISR);
2004 1982
2005 if (printk_ratelimit()) 1983 printk(KERN_WARNING "%s: overflow: r: %d, d: %d, puisr: %08x, d: %u.\n",
2006 printk("%s: overflow: r: %d, d: %d, puisr: %08x, d: %u.\n", 1984 dev->name, !!(dmacsr & HIFN_DMACSR_R_OVER),
2007 dev->name, !!(dmacsr & HIFN_DMACSR_R_OVER), 1985 !!(dmacsr & HIFN_DMACSR_D_OVER),
2008 !!(dmacsr & HIFN_DMACSR_D_OVER), 1986 puisr, !!(puisr & HIFN_PUISR_DSTOVER));
2009 puisr, !!(puisr & HIFN_PUISR_DSTOVER));
2010 if (!!(puisr & HIFN_PUISR_DSTOVER)) 1987 if (!!(puisr & HIFN_PUISR_DSTOVER))
2011 hifn_write_0(dev, HIFN_0_PUISR, HIFN_PUISR_DSTOVER); 1988 hifn_write_0(dev, HIFN_0_PUISR, HIFN_PUISR_DSTOVER);
2012 hifn_write_1(dev, HIFN_1_DMA_CSR, dmacsr & (HIFN_DMACSR_R_OVER | 1989 hifn_write_1(dev, HIFN_1_DMA_CSR, dmacsr & (HIFN_DMACSR_R_OVER |
@@ -2016,12 +1993,11 @@ static irqreturn_t hifn_interrupt(int irq, void *data)
2016 restart = dmacsr & (HIFN_DMACSR_C_ABORT | HIFN_DMACSR_S_ABORT | 1993 restart = dmacsr & (HIFN_DMACSR_C_ABORT | HIFN_DMACSR_S_ABORT |
2017 HIFN_DMACSR_D_ABORT | HIFN_DMACSR_R_ABORT); 1994 HIFN_DMACSR_D_ABORT | HIFN_DMACSR_R_ABORT);
2018 if (restart) { 1995 if (restart) {
2019 if (printk_ratelimit()) 1996 printk(KERN_WARNING "%s: abort: c: %d, s: %d, d: %d, r: %d.\n",
2020 printk("%s: abort: c: %d, s: %d, d: %d, r: %d.\n", 1997 dev->name, !!(dmacsr & HIFN_DMACSR_C_ABORT),
2021 dev->name, !!(dmacsr & HIFN_DMACSR_C_ABORT), 1998 !!(dmacsr & HIFN_DMACSR_S_ABORT),
2022 !!(dmacsr & HIFN_DMACSR_S_ABORT), 1999 !!(dmacsr & HIFN_DMACSR_D_ABORT),
2023 !!(dmacsr & HIFN_DMACSR_D_ABORT), 2000 !!(dmacsr & HIFN_DMACSR_R_ABORT));
2024 !!(dmacsr & HIFN_DMACSR_R_ABORT));
2025 hifn_reset_dma(dev, 1); 2001 hifn_reset_dma(dev, 1);
2026 hifn_init_dma(dev); 2002 hifn_init_dma(dev);
2027 hifn_init_registers(dev); 2003 hifn_init_registers(dev);
@@ -2034,7 +2010,6 @@ static irqreturn_t hifn_interrupt(int irq, void *data)
2034 } 2010 }
2035 2011
2036 tasklet_schedule(&dev->tasklet); 2012 tasklet_schedule(&dev->tasklet);
2037 hifn_clear_rings(dev);
2038 2013
2039 return IRQ_HANDLED; 2014 return IRQ_HANDLED;
2040} 2015}
@@ -2048,21 +2023,25 @@ static void hifn_flush(struct hifn_device *dev)
2048 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt; 2023 struct hifn_dma *dma = (struct hifn_dma *)dev->desc_virt;
2049 int i; 2024 int i;
2050 2025
2051 spin_lock_irqsave(&dev->lock, flags);
2052 for (i=0; i<HIFN_D_RES_RSIZE; ++i) { 2026 for (i=0; i<HIFN_D_RES_RSIZE; ++i) {
2053 struct hifn_desc *d = &dma->resr[i]; 2027 struct hifn_desc *d = &dma->resr[i];
2054 2028
2055 if (dev->sa[i]) { 2029 if (dev->sa[i]) {
2056 hifn_process_ready(dev->sa[i], 2030 hifn_process_ready(dev->sa[i],
2057 (d->l & __cpu_to_le32(HIFN_D_VALID))?-ENODEV:0); 2031 (d->l & __cpu_to_le32(HIFN_D_VALID))?-ENODEV:0);
2032 hifn_complete_sa(dev, i);
2058 } 2033 }
2059 } 2034 }
2060 2035
2036 spin_lock_irqsave(&dev->lock, flags);
2061 while ((async_req = crypto_dequeue_request(&dev->queue))) { 2037 while ((async_req = crypto_dequeue_request(&dev->queue))) {
2062 ctx = crypto_tfm_ctx(async_req->tfm); 2038 ctx = crypto_tfm_ctx(async_req->tfm);
2063 req = container_of(async_req, struct ablkcipher_request, base); 2039 req = container_of(async_req, struct ablkcipher_request, base);
2040 spin_unlock_irqrestore(&dev->lock, flags);
2064 2041
2065 hifn_process_ready(req, -ENODEV); 2042 hifn_process_ready(req, -ENODEV);
2043
2044 spin_lock_irqsave(&dev->lock, flags);
2066 } 2045 }
2067 spin_unlock_irqrestore(&dev->lock, flags); 2046 spin_unlock_irqrestore(&dev->lock, flags);
2068} 2047}
@@ -2121,6 +2100,7 @@ static int hifn_setup_crypto_req(struct ablkcipher_request *req, u8 op,
2121 u8 type, u8 mode) 2100 u8 type, u8 mode)
2122{ 2101{
2123 struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm); 2102 struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm);
2103 struct hifn_request_context *rctx = ablkcipher_request_ctx(req);
2124 unsigned ivsize; 2104 unsigned ivsize;
2125 2105
2126 ivsize = crypto_ablkcipher_ivsize(crypto_ablkcipher_reqtfm(req)); 2106 ivsize = crypto_ablkcipher_ivsize(crypto_ablkcipher_reqtfm(req));
@@ -2141,11 +2121,11 @@ static int hifn_setup_crypto_req(struct ablkcipher_request *req, u8 op,
2141 type = ACRYPTO_TYPE_AES_256; 2121 type = ACRYPTO_TYPE_AES_256;
2142 } 2122 }
2143 2123
2144 ctx->op = op; 2124 rctx->op = op;
2145 ctx->mode = mode; 2125 rctx->mode = mode;
2146 ctx->type = type; 2126 rctx->type = type;
2147 ctx->iv = req->info; 2127 rctx->iv = req->info;
2148 ctx->ivsize = ivsize; 2128 rctx->ivsize = ivsize;
2149 2129
2150 /* 2130 /*
2151 * HEAVY TODO: needs to kick Herbert XU to write documentation. 2131 * HEAVY TODO: needs to kick Herbert XU to write documentation.
@@ -2158,7 +2138,7 @@ static int hifn_setup_crypto_req(struct ablkcipher_request *req, u8 op,
2158 2138
2159static int hifn_process_queue(struct hifn_device *dev) 2139static int hifn_process_queue(struct hifn_device *dev)
2160{ 2140{
2161 struct crypto_async_request *async_req; 2141 struct crypto_async_request *async_req, *backlog;
2162 struct hifn_context *ctx; 2142 struct hifn_context *ctx;
2163 struct ablkcipher_request *req; 2143 struct ablkcipher_request *req;
2164 unsigned long flags; 2144 unsigned long flags;
@@ -2166,12 +2146,16 @@ static int hifn_process_queue(struct hifn_device *dev)
2166 2146
2167 while (dev->started < HIFN_QUEUE_LENGTH) { 2147 while (dev->started < HIFN_QUEUE_LENGTH) {
2168 spin_lock_irqsave(&dev->lock, flags); 2148 spin_lock_irqsave(&dev->lock, flags);
2149 backlog = crypto_get_backlog(&dev->queue);
2169 async_req = crypto_dequeue_request(&dev->queue); 2150 async_req = crypto_dequeue_request(&dev->queue);
2170 spin_unlock_irqrestore(&dev->lock, flags); 2151 spin_unlock_irqrestore(&dev->lock, flags);
2171 2152
2172 if (!async_req) 2153 if (!async_req)
2173 break; 2154 break;
2174 2155
2156 if (backlog)
2157 backlog->complete(backlog, -EINPROGRESS);
2158
2175 ctx = crypto_tfm_ctx(async_req->tfm); 2159 ctx = crypto_tfm_ctx(async_req->tfm);
2176 req = container_of(async_req, struct ablkcipher_request, base); 2160 req = container_of(async_req, struct ablkcipher_request, base);
2177 2161
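hifn_process_queue() now follows the usual crypto_queue backlog protocol: peek the backlog before dequeuing and complete it with -EINPROGRESS, so callers that were parked on -EBUSY learn their request has been accepted. A hedged sketch of that drain loop (drain_queue and the submit callback are hypothetical; crypto_get_backlog() and crypto_dequeue_request() are the real API used above):

#include <crypto/algapi.h>
#include <linux/errno.h>
#include <linux/spinlock.h>

/* Drain a crypto_queue while the hardware has room; backlogged requests
 * must be told they are in progress before being submitted. */
static void drain_queue(struct crypto_queue *queue, spinlock_t *lock,
                        int (*submit)(struct crypto_async_request *req))
{
        struct crypto_async_request *async_req, *backlog;
        unsigned long flags;

        for (;;) {
                spin_lock_irqsave(lock, flags);
                backlog = crypto_get_backlog(queue);
                async_req = crypto_dequeue_request(queue);
                spin_unlock_irqrestore(lock, flags);

                if (!async_req)
                        break;

                if (backlog)
                        backlog->complete(backlog, -EINPROGRESS);

                submit(async_req);
        }
}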
@@ -2496,7 +2480,7 @@ static int hifn_cra_init(struct crypto_tfm *tfm)
2496 struct hifn_context *ctx = crypto_tfm_ctx(tfm); 2480 struct hifn_context *ctx = crypto_tfm_ctx(tfm);
2497 2481
2498 ctx->dev = ha->dev; 2482 ctx->dev = ha->dev;
2499 2483 tfm->crt_ablkcipher.reqsize = sizeof(struct hifn_request_context);
2500 return 0; 2484 return 0;
2501} 2485}
2502 2486
@@ -2574,7 +2558,10 @@ static void hifn_tasklet_callback(unsigned long data)
2574 * (like dev->success), but they are used in process 2558 * (like dev->success), but they are used in process
2575 * context or update is atomic (like setting dev->sa[i] to NULL). 2559 * context or update is atomic (like setting dev->sa[i] to NULL).
2576 */ 2560 */
2577 hifn_check_for_completion(dev, 0); 2561 hifn_clear_rings(dev, 0);
2562
2563 if (dev->started < HIFN_QUEUE_LENGTH && dev->queue.qlen)
2564 hifn_process_queue(dev);
2578} 2565}
2579 2566
2580static int hifn_probe(struct pci_dev *pdev, const struct pci_device_id *id) 2567static int hifn_probe(struct pci_dev *pdev, const struct pci_device_id *id)
@@ -2631,22 +2618,11 @@ static int hifn_probe(struct pci_dev *pdev, const struct pci_device_id *id)
2631 goto err_out_unmap_bars; 2618 goto err_out_unmap_bars;
2632 } 2619 }
2633 2620
2634 dev->result_mem = __get_free_pages(GFP_KERNEL, HIFN_MAX_RESULT_ORDER);
2635 if (!dev->result_mem) {
2636 dprintk("Failed to allocate %d pages for result_mem.\n",
2637 HIFN_MAX_RESULT_ORDER);
2638 goto err_out_unmap_bars;
2639 }
2640 memset((void *)dev->result_mem, 0, PAGE_SIZE*(1<<HIFN_MAX_RESULT_ORDER));
2641
2642 dev->dst = pci_map_single(pdev, (void *)dev->result_mem,
2643 PAGE_SIZE << HIFN_MAX_RESULT_ORDER, PCI_DMA_FROMDEVICE);
2644
2645 dev->desc_virt = pci_alloc_consistent(pdev, sizeof(struct hifn_dma), 2621 dev->desc_virt = pci_alloc_consistent(pdev, sizeof(struct hifn_dma),
2646 &dev->desc_dma); 2622 &dev->desc_dma);
2647 if (!dev->desc_virt) { 2623 if (!dev->desc_virt) {
2648 dprintk("Failed to allocate descriptor rings.\n"); 2624 dprintk("Failed to allocate descriptor rings.\n");
2649 goto err_out_free_result_pages; 2625 goto err_out_unmap_bars;
2650 } 2626 }
2651 memset(dev->desc_virt, 0, sizeof(struct hifn_dma)); 2627 memset(dev->desc_virt, 0, sizeof(struct hifn_dma));
2652 2628
@@ -2706,11 +2682,6 @@ err_out_free_desc:
2706 pci_free_consistent(pdev, sizeof(struct hifn_dma), 2682 pci_free_consistent(pdev, sizeof(struct hifn_dma),
2707 dev->desc_virt, dev->desc_dma); 2683 dev->desc_virt, dev->desc_dma);
2708 2684
2709err_out_free_result_pages:
2710 pci_unmap_single(pdev, dev->dst, PAGE_SIZE << HIFN_MAX_RESULT_ORDER,
2711 PCI_DMA_FROMDEVICE);
2712 free_pages(dev->result_mem, HIFN_MAX_RESULT_ORDER);
2713
2714err_out_unmap_bars: 2685err_out_unmap_bars:
2715 for (i=0; i<3; ++i) 2686 for (i=0; i<3; ++i)
2716 if (dev->bar[i]) 2687 if (dev->bar[i])
@@ -2748,10 +2719,6 @@ static void hifn_remove(struct pci_dev *pdev)
2748 2719
2749 pci_free_consistent(pdev, sizeof(struct hifn_dma), 2720 pci_free_consistent(pdev, sizeof(struct hifn_dma),
2750 dev->desc_virt, dev->desc_dma); 2721 dev->desc_virt, dev->desc_dma);
2751 pci_unmap_single(pdev, dev->dst,
2752 PAGE_SIZE << HIFN_MAX_RESULT_ORDER,
2753 PCI_DMA_FROMDEVICE);
2754 free_pages(dev->result_mem, HIFN_MAX_RESULT_ORDER);
2755 for (i=0; i<3; ++i) 2722 for (i=0; i<3; ++i)
2756 if (dev->bar[i]) 2723 if (dev->bar[i])
2757 iounmap(dev->bar[i]); 2724 iounmap(dev->bar[i]);
@@ -2782,6 +2749,11 @@ static int __devinit hifn_init(void)
2782 unsigned int freq; 2749 unsigned int freq;
2783 int err; 2750 int err;
2784 2751
2752 if (sizeof(dma_addr_t) > 4) {
2753 printk(KERN_INFO "HIFN supports only 32-bit addresses.\n");
2754 return -EINVAL;
2755 }
2756
2785 if (strncmp(hifn_pll_ref, "ext", 3) && 2757 if (strncmp(hifn_pll_ref, "ext", 3) &&
2786 strncmp(hifn_pll_ref, "pci", 3)) { 2758 strncmp(hifn_pll_ref, "pci", 3)) {
2787 printk(KERN_ERR "hifn795x: invalid hifn_pll_ref clock, " 2759 printk(KERN_ERR "hifn795x: invalid hifn_pll_ref clock, "
diff --git a/drivers/crypto/padlock-aes.c b/drivers/crypto/padlock-aes.c
index bf2917d197a0..856b3cc25583 100644
--- a/drivers/crypto/padlock-aes.c
+++ b/drivers/crypto/padlock-aes.c
@@ -15,6 +15,8 @@
15#include <linux/errno.h> 15#include <linux/errno.h>
16#include <linux/interrupt.h> 16#include <linux/interrupt.h>
17#include <linux/kernel.h> 17#include <linux/kernel.h>
18#include <linux/percpu.h>
19#include <linux/smp.h>
18#include <asm/byteorder.h> 20#include <asm/byteorder.h>
19#include <asm/i387.h> 21#include <asm/i387.h>
20#include "padlock.h" 22#include "padlock.h"
@@ -49,6 +51,8 @@ struct aes_ctx {
49 u32 *D; 51 u32 *D;
50}; 52};
51 53
54static DEFINE_PER_CPU(struct cword *, last_cword);
55
52/* Tells whether the ACE is capable to generate 56/* Tells whether the ACE is capable to generate
53 the extended key for a given key_len. */ 57 the extended key for a given key_len. */
54static inline int 58static inline int
@@ -89,6 +93,7 @@ static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
89 const __le32 *key = (const __le32 *)in_key; 93 const __le32 *key = (const __le32 *)in_key;
90 u32 *flags = &tfm->crt_flags; 94 u32 *flags = &tfm->crt_flags;
91 struct crypto_aes_ctx gen_aes; 95 struct crypto_aes_ctx gen_aes;
96 int cpu;
92 97
93 if (key_len % 8) { 98 if (key_len % 8) {
94 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN; 99 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
@@ -118,7 +123,7 @@ static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
118 123
119 /* Don't generate extended keys if the hardware can do it. */ 124 /* Don't generate extended keys if the hardware can do it. */
120 if (aes_hw_extkey_available(key_len)) 125 if (aes_hw_extkey_available(key_len))
121 return 0; 126 goto ok;
122 127
123 ctx->D = ctx->d_data; 128 ctx->D = ctx->d_data;
124 ctx->cword.encrypt.keygen = 1; 129 ctx->cword.encrypt.keygen = 1;
@@ -131,15 +136,30 @@ static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
131 136
132 memcpy(ctx->E, gen_aes.key_enc, AES_MAX_KEYLENGTH); 137 memcpy(ctx->E, gen_aes.key_enc, AES_MAX_KEYLENGTH);
133 memcpy(ctx->D, gen_aes.key_dec, AES_MAX_KEYLENGTH); 138 memcpy(ctx->D, gen_aes.key_dec, AES_MAX_KEYLENGTH);
139
140ok:
141 for_each_online_cpu(cpu)
142 if (&ctx->cword.encrypt == per_cpu(last_cword, cpu) ||
143 &ctx->cword.decrypt == per_cpu(last_cword, cpu))
144 per_cpu(last_cword, cpu) = NULL;
145
134 return 0; 146 return 0;
135} 147}
136 148
137/* ====== Encryption/decryption routines ====== */ 149/* ====== Encryption/decryption routines ====== */
138 150
139/* These are the real call to PadLock. */ 151/* These are the real call to PadLock. */
140static inline void padlock_reset_key(void) 152static inline void padlock_reset_key(struct cword *cword)
153{
154 int cpu = raw_smp_processor_id();
155
156 if (cword != per_cpu(last_cword, cpu))
157 asm volatile ("pushfl; popfl");
158}
159
160static inline void padlock_store_cword(struct cword *cword)
141{ 161{
142 asm volatile ("pushfl; popfl"); 162 per_cpu(last_cword, raw_smp_processor_id()) = cword;
143} 163}
144 164
145/* 165/*
@@ -149,7 +169,7 @@ static inline void padlock_reset_key(void)
149 */ 169 */
150 170
151static inline void padlock_xcrypt(const u8 *input, u8 *output, void *key, 171static inline void padlock_xcrypt(const u8 *input, u8 *output, void *key,
152 void *control_word) 172 struct cword *control_word)
153{ 173{
154 asm volatile (".byte 0xf3,0x0f,0xa7,0xc8" /* rep xcryptecb */ 174 asm volatile (".byte 0xf3,0x0f,0xa7,0xc8" /* rep xcryptecb */
155 : "+S"(input), "+D"(output) 175 : "+S"(input), "+D"(output)
@@ -213,22 +233,24 @@ static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
213{ 233{
214 struct aes_ctx *ctx = aes_ctx(tfm); 234 struct aes_ctx *ctx = aes_ctx(tfm);
215 int ts_state; 235 int ts_state;
216 padlock_reset_key();
217 236
237 padlock_reset_key(&ctx->cword.encrypt);
218 ts_state = irq_ts_save(); 238 ts_state = irq_ts_save();
219 aes_crypt(in, out, ctx->E, &ctx->cword.encrypt); 239 aes_crypt(in, out, ctx->E, &ctx->cword.encrypt);
220 irq_ts_restore(ts_state); 240 irq_ts_restore(ts_state);
241 padlock_store_cword(&ctx->cword.encrypt);
221} 242}
222 243
223static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in) 244static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
224{ 245{
225 struct aes_ctx *ctx = aes_ctx(tfm); 246 struct aes_ctx *ctx = aes_ctx(tfm);
226 int ts_state; 247 int ts_state;
227 padlock_reset_key();
228 248
249 padlock_reset_key(&ctx->cword.encrypt);
229 ts_state = irq_ts_save(); 250 ts_state = irq_ts_save();
230 aes_crypt(in, out, ctx->D, &ctx->cword.decrypt); 251 aes_crypt(in, out, ctx->D, &ctx->cword.decrypt);
231 irq_ts_restore(ts_state); 252 irq_ts_restore(ts_state);
253 padlock_store_cword(&ctx->cword.encrypt);
232} 254}
233 255
234static struct crypto_alg aes_alg = { 256static struct crypto_alg aes_alg = {
@@ -261,7 +283,7 @@ static int ecb_aes_encrypt(struct blkcipher_desc *desc,
261 int err; 283 int err;
262 int ts_state; 284 int ts_state;
263 285
264 padlock_reset_key(); 286 padlock_reset_key(&ctx->cword.encrypt);
265 287
266 blkcipher_walk_init(&walk, dst, src, nbytes); 288 blkcipher_walk_init(&walk, dst, src, nbytes);
267 err = blkcipher_walk_virt(desc, &walk); 289 err = blkcipher_walk_virt(desc, &walk);
@@ -276,6 +298,8 @@ static int ecb_aes_encrypt(struct blkcipher_desc *desc,
276 } 298 }
277 irq_ts_restore(ts_state); 299 irq_ts_restore(ts_state);
278 300
301 padlock_store_cword(&ctx->cword.encrypt);
302
279 return err; 303 return err;
280} 304}
281 305
@@ -288,7 +312,7 @@ static int ecb_aes_decrypt(struct blkcipher_desc *desc,
288 int err; 312 int err;
289 int ts_state; 313 int ts_state;
290 314
291 padlock_reset_key(); 315 padlock_reset_key(&ctx->cword.decrypt);
292 316
293 blkcipher_walk_init(&walk, dst, src, nbytes); 317 blkcipher_walk_init(&walk, dst, src, nbytes);
294 err = blkcipher_walk_virt(desc, &walk); 318 err = blkcipher_walk_virt(desc, &walk);
@@ -302,6 +326,9 @@ static int ecb_aes_decrypt(struct blkcipher_desc *desc,
302 err = blkcipher_walk_done(desc, &walk, nbytes); 326 err = blkcipher_walk_done(desc, &walk, nbytes);
303 } 327 }
304 irq_ts_restore(ts_state); 328 irq_ts_restore(ts_state);
329
330 padlock_store_cword(&ctx->cword.encrypt);
331
305 return err; 332 return err;
306} 333}
307 334
@@ -336,7 +363,7 @@ static int cbc_aes_encrypt(struct blkcipher_desc *desc,
336 int err; 363 int err;
337 int ts_state; 364 int ts_state;
338 365
339 padlock_reset_key(); 366 padlock_reset_key(&ctx->cword.encrypt);
340 367
341 blkcipher_walk_init(&walk, dst, src, nbytes); 368 blkcipher_walk_init(&walk, dst, src, nbytes);
342 err = blkcipher_walk_virt(desc, &walk); 369 err = blkcipher_walk_virt(desc, &walk);
@@ -353,6 +380,8 @@ static int cbc_aes_encrypt(struct blkcipher_desc *desc,
353 } 380 }
354 irq_ts_restore(ts_state); 381 irq_ts_restore(ts_state);
355 382
383 padlock_store_cword(&ctx->cword.decrypt);
384
356 return err; 385 return err;
357} 386}
358 387
@@ -365,7 +394,7 @@ static int cbc_aes_decrypt(struct blkcipher_desc *desc,
365 int err; 394 int err;
366 int ts_state; 395 int ts_state;
367 396
368 padlock_reset_key(); 397 padlock_reset_key(&ctx->cword.encrypt);
369 398
370 blkcipher_walk_init(&walk, dst, src, nbytes); 399 blkcipher_walk_init(&walk, dst, src, nbytes);
371 err = blkcipher_walk_virt(desc, &walk); 400 err = blkcipher_walk_virt(desc, &walk);
@@ -380,6 +409,9 @@ static int cbc_aes_decrypt(struct blkcipher_desc *desc,
380 } 409 }
381 410
382 irq_ts_restore(ts_state); 411 irq_ts_restore(ts_state);
412
413 padlock_store_cword(&ctx->cword.encrypt);
414
383 return err; 415 return err;
384} 416}
385 417
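The PadLock changes cache, per CPU, a pointer to the control word that was last loaded, so the costly pushfl/popfl key-reload sequence only runs when a different tfm's cword was last used on that CPU; setkey invalidates the cached pointer on every online CPU. A minimal sketch of the per-CPU cache pattern (my_cword and the my_* helpers are illustrative; the asm is 32-bit x86, as in the driver):

#include <linux/cpumask.h>
#include <linux/percpu.h>
#include <linux/smp.h>

struct my_cword { unsigned int rounds : 4, algo : 3, keygen : 1; };

static DEFINE_PER_CPU(struct my_cword *, my_last_cword);

/* force a hardware key reload only if this CPU last used a different cword */
static inline void my_reset_key(struct my_cword *cword)
{
        if (cword != per_cpu(my_last_cword, raw_smp_processor_id()))
                asm volatile ("pushfl; popfl"); /* PadLock reloads the key */
}

static inline void my_store_cword(struct my_cword *cword)
{
        per_cpu(my_last_cword, raw_smp_processor_id()) = cword;
}

/* on rekey, drop any stale cached pointers on all CPUs */
static void my_invalidate_cword(struct my_cword *enc, struct my_cword *dec)
{
        int cpu;

        for_each_online_cpu(cpu)
                if (per_cpu(my_last_cword, cpu) == enc ||
                    per_cpu(my_last_cword, cpu) == dec)
                        per_cpu(my_last_cword, cpu) = NULL;
}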
diff --git a/drivers/crypto/talitos.c b/drivers/crypto/talitos.c
index 24607669a52b..a3918c16b3db 100644
--- a/drivers/crypto/talitos.c
+++ b/drivers/crypto/talitos.c
@@ -127,7 +127,6 @@ struct talitos_private {
127 127
128 /* request callback tasklet */ 128 /* request callback tasklet */
129 struct tasklet_struct done_task; 129 struct tasklet_struct done_task;
130 struct tasklet_struct error_task;
131 130
132 /* list of registered algorithms */ 131 /* list of registered algorithms */
133 struct list_head alg_list; 132 struct list_head alg_list;
@@ -138,6 +137,7 @@ struct talitos_private {
138 137
139/* .features flag */ 138/* .features flag */
140#define TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT 0x00000001 139#define TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT 0x00000001
140#define TALITOS_FTR_HW_AUTH_CHECK 0x00000002
141 141
142/* 142/*
143 * map virtual single (contiguous) pointer to h/w descriptor pointer 143 * map virtual single (contiguous) pointer to h/w descriptor pointer
@@ -184,6 +184,11 @@ static int reset_channel(struct device *dev, int ch)
184 setbits32(priv->reg + TALITOS_CCCR_LO(ch), TALITOS_CCCR_LO_CDWE | 184 setbits32(priv->reg + TALITOS_CCCR_LO(ch), TALITOS_CCCR_LO_CDWE |
185 TALITOS_CCCR_LO_CDIE); 185 TALITOS_CCCR_LO_CDIE);
186 186
187 /* and ICCR writeback, if available */
188 if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
189 setbits32(priv->reg + TALITOS_CCCR_LO(ch),
190 TALITOS_CCCR_LO_IWSE);
191
187 return 0; 192 return 0;
188} 193}
189 194
@@ -239,6 +244,11 @@ static int init_device(struct device *dev)
239 setbits32(priv->reg + TALITOS_IMR, TALITOS_IMR_INIT); 244 setbits32(priv->reg + TALITOS_IMR, TALITOS_IMR_INIT);
240 setbits32(priv->reg + TALITOS_IMR_LO, TALITOS_IMR_LO_INIT); 245 setbits32(priv->reg + TALITOS_IMR_LO, TALITOS_IMR_LO_INIT);
241 246
247 /* disable integrity check error interrupts (use writeback instead) */
248 if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
249 setbits32(priv->reg + TALITOS_MDEUICR_LO,
250 TALITOS_MDEUICR_LO_ICE);
251
242 return 0; 252 return 0;
243} 253}
244 254
@@ -370,6 +380,12 @@ static void talitos_done(unsigned long data)
370 380
371 for (ch = 0; ch < priv->num_channels; ch++) 381 for (ch = 0; ch < priv->num_channels; ch++)
372 flush_channel(dev, ch, 0, 0); 382 flush_channel(dev, ch, 0, 0);
383
384 /* At this point, all completed channels have been processed.
385 * Unmask done interrupts for channels completed later on.
386 */
387 setbits32(priv->reg + TALITOS_IMR, TALITOS_IMR_INIT);
388 setbits32(priv->reg + TALITOS_IMR_LO, TALITOS_IMR_LO_INIT);
373} 389}
374 390
375/* 391/*
@@ -469,16 +485,13 @@ static void report_eu_error(struct device *dev, int ch, struct talitos_desc *des
469/* 485/*
470 * recover from error interrupts 486 * recover from error interrupts
471 */ 487 */
472static void talitos_error(unsigned long data) 488static void talitos_error(unsigned long data, u32 isr, u32 isr_lo)
473{ 489{
474 struct device *dev = (struct device *)data; 490 struct device *dev = (struct device *)data;
475 struct talitos_private *priv = dev_get_drvdata(dev); 491 struct talitos_private *priv = dev_get_drvdata(dev);
476 unsigned int timeout = TALITOS_TIMEOUT; 492 unsigned int timeout = TALITOS_TIMEOUT;
477 int ch, error, reset_dev = 0, reset_ch = 0; 493 int ch, error, reset_dev = 0, reset_ch = 0;
478 u32 isr, isr_lo, v, v_lo; 494 u32 v, v_lo;
479
480 isr = in_be32(priv->reg + TALITOS_ISR);
481 isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);
482 495
483 for (ch = 0; ch < priv->num_channels; ch++) { 496 for (ch = 0; ch < priv->num_channels; ch++) {
484 /* skip channels without errors */ 497 /* skip channels without errors */
@@ -560,16 +573,19 @@ static irqreturn_t talitos_interrupt(int irq, void *data)
560 573
561 isr = in_be32(priv->reg + TALITOS_ISR); 574 isr = in_be32(priv->reg + TALITOS_ISR);
562 isr_lo = in_be32(priv->reg + TALITOS_ISR_LO); 575 isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);
563 576 /* Acknowledge interrupt */
564 /* ack */
565 out_be32(priv->reg + TALITOS_ICR, isr); 577 out_be32(priv->reg + TALITOS_ICR, isr);
566 out_be32(priv->reg + TALITOS_ICR_LO, isr_lo); 578 out_be32(priv->reg + TALITOS_ICR_LO, isr_lo);
567 579
568 if (unlikely((isr & ~TALITOS_ISR_CHDONE) || isr_lo)) 580 if (unlikely((isr & ~TALITOS_ISR_CHDONE) || isr_lo))
569 talitos_error((unsigned long)data); 581 talitos_error((unsigned long)data, isr, isr_lo);
570 else 582 else
571 if (likely(isr & TALITOS_ISR_CHDONE)) 583 if (likely(isr & TALITOS_ISR_CHDONE)) {
584 /* mask further done interrupts. */
585 clrbits32(priv->reg + TALITOS_IMR, TALITOS_IMR_DONE);
586 /* done_task will unmask done interrupts at exit */
572 tasklet_schedule(&priv->done_task); 587 tasklet_schedule(&priv->done_task);
588 }
573 589
574 return (isr || isr_lo) ? IRQ_HANDLED : IRQ_NONE; 590 return (isr || isr_lo) ? IRQ_HANDLED : IRQ_NONE;
575} 591}
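The talitos interrupt rework acknowledges the interrupt in the ISR itself, masks further channel-done interrupts, and leaves the per-channel flush to the done tasklet, which re-enables the done interrupts once every completed channel has been drained. A hedged sketch of that top-half/bottom-half split (struct my_priv, MY_ISR_CHDONE and the my_* register helpers are placeholders, not the driver's API):

#include <linux/interrupt.h>
#include <linux/types.h>

#define MY_ISR_CHDONE   0x55555555      /* hypothetical channel-done bits */

struct my_priv {
        struct tasklet_struct done_task;
        int num_channels;
};

/* placeholder register helpers standing in for the driver's real ones */
u32 my_read_isr(struct my_priv *priv);
void my_ack_irq(struct my_priv *priv, u32 isr);
void my_mask_done_irqs(struct my_priv *priv);
void my_unmask_done_irqs(struct my_priv *priv);
void my_flush_channel(struct my_priv *priv, int ch);

/* top half: acknowledge in the ISR, mask DONE sources, defer the work */
static irqreturn_t my_isr(int irq, void *data)
{
        struct my_priv *priv = data;
        u32 isr = my_read_isr(priv);

        if (!isr)
                return IRQ_NONE;

        my_ack_irq(priv, isr);
        if (isr & MY_ISR_CHDONE) {
                my_mask_done_irqs(priv);        /* keep DONE quiet until drained */
                tasklet_schedule(&priv->done_task);
        }
        return IRQ_HANDLED;
}

/* bottom half: drain every completed channel, then unmask DONE again */
static void my_done_task(unsigned long data)
{
        struct my_priv *priv = (struct my_priv *)data;
        int ch;

        for (ch = 0; ch < priv->num_channels; ch++)
                my_flush_channel(priv, ch);

        my_unmask_done_irqs(priv);
}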
@@ -802,7 +818,7 @@ static void ipsec_esp_encrypt_done(struct device *dev,
802 aead_request_complete(areq, err); 818 aead_request_complete(areq, err);
803} 819}
804 820
805static void ipsec_esp_decrypt_done(struct device *dev, 821static void ipsec_esp_decrypt_swauth_done(struct device *dev,
806 struct talitos_desc *desc, void *context, 822 struct talitos_desc *desc, void *context,
807 int err) 823 int err)
808{ 824{
@@ -834,6 +850,27 @@ static void ipsec_esp_decrypt_done(struct device *dev,
834 aead_request_complete(req, err); 850 aead_request_complete(req, err);
835} 851}
836 852
853static void ipsec_esp_decrypt_hwauth_done(struct device *dev,
854 struct talitos_desc *desc, void *context,
855 int err)
856{
857 struct aead_request *req = context;
858 struct ipsec_esp_edesc *edesc =
859 container_of(desc, struct ipsec_esp_edesc, desc);
860
861 ipsec_esp_unmap(dev, edesc, req);
862
863 /* check ICV auth status */
864 if (!err)
865 if ((desc->hdr_lo & DESC_HDR_LO_ICCR1_MASK) !=
866 DESC_HDR_LO_ICCR1_PASS)
867 err = -EBADMSG;
868
869 kfree(edesc);
870
871 aead_request_complete(req, err);
872}
873
837/* 874/*
838 * convert scatterlist to SEC h/w link table format 875 * convert scatterlist to SEC h/w link table format
839 * stop at cryptlen bytes 876 * stop at cryptlen bytes
@@ -887,6 +924,7 @@ static int ipsec_esp(struct ipsec_esp_edesc *edesc, struct aead_request *areq,
887 unsigned int authsize = ctx->authsize; 924 unsigned int authsize = ctx->authsize;
888 unsigned int ivsize; 925 unsigned int ivsize;
889 int sg_count, ret; 926 int sg_count, ret;
927 int sg_link_tbl_len;
890 928
891 /* hmac key */ 929 /* hmac key */
892 map_single_talitos_ptr(dev, &desc->ptr[0], ctx->authkeylen, &ctx->key, 930 map_single_talitos_ptr(dev, &desc->ptr[0], ctx->authkeylen, &ctx->key,
@@ -924,33 +962,19 @@ static int ipsec_esp(struct ipsec_esp_edesc *edesc, struct aead_request *areq,
924 if (sg_count == 1) { 962 if (sg_count == 1) {
925 desc->ptr[4].ptr = cpu_to_be32(sg_dma_address(areq->src)); 963 desc->ptr[4].ptr = cpu_to_be32(sg_dma_address(areq->src));
926 } else { 964 } else {
927 sg_count = sg_to_link_tbl(areq->src, sg_count, cryptlen, 965 sg_link_tbl_len = cryptlen;
966
967 if ((edesc->desc.hdr & DESC_HDR_MODE1_MDEU_CICV) &&
968 (edesc->desc.hdr & DESC_HDR_MODE0_ENCRYPT) == 0) {
969 sg_link_tbl_len = cryptlen + authsize;
970 }
971 sg_count = sg_to_link_tbl(areq->src, sg_count, sg_link_tbl_len,
928 &edesc->link_tbl[0]); 972 &edesc->link_tbl[0]);
929 if (sg_count > 1) { 973 if (sg_count > 1) {
930 struct talitos_ptr *link_tbl_ptr =
931 &edesc->link_tbl[sg_count-1];
932 struct scatterlist *sg;
933 struct talitos_private *priv = dev_get_drvdata(dev);
934
935 desc->ptr[4].j_extent |= DESC_PTR_LNKTBL_JUMP; 974 desc->ptr[4].j_extent |= DESC_PTR_LNKTBL_JUMP;
936 desc->ptr[4].ptr = cpu_to_be32(edesc->dma_link_tbl); 975 desc->ptr[4].ptr = cpu_to_be32(edesc->dma_link_tbl);
937 dma_sync_single_for_device(ctx->dev, edesc->dma_link_tbl, 976 dma_sync_single_for_device(ctx->dev, edesc->dma_link_tbl,
938 edesc->dma_len, DMA_BIDIRECTIONAL); 977 edesc->dma_len, DMA_BIDIRECTIONAL);
939 /* If necessary for this SEC revision,
940 * add a link table entry for ICV.
941 */
942 if ((priv->features &
943 TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT) &&
944 (edesc->desc.hdr & DESC_HDR_MODE0_ENCRYPT) == 0) {
945 link_tbl_ptr->j_extent = 0;
946 link_tbl_ptr++;
947 link_tbl_ptr->j_extent = DESC_PTR_LNKTBL_RETURN;
948 link_tbl_ptr->len = cpu_to_be16(authsize);
949 sg = sg_last(areq->src, edesc->src_nents ? : 1);
950 link_tbl_ptr->ptr = cpu_to_be32(
951 (char *)sg_dma_address(sg)
952 + sg->length - authsize);
953 }
954 } else { 978 } else {
955 /* Only one segment now, so no link tbl needed */ 979 /* Only one segment now, so no link tbl needed */
956 desc->ptr[4].ptr = cpu_to_be32(sg_dma_address(areq->src)); 980 desc->ptr[4].ptr = cpu_to_be32(sg_dma_address(areq->src));
@@ -975,13 +999,9 @@ static int ipsec_esp(struct ipsec_esp_edesc *edesc, struct aead_request *areq,
975 desc->ptr[5].ptr = cpu_to_be32((struct talitos_ptr *) 999 desc->ptr[5].ptr = cpu_to_be32((struct talitos_ptr *)
976 edesc->dma_link_tbl + 1000 edesc->dma_link_tbl +
977 edesc->src_nents + 1); 1001 edesc->src_nents + 1);
978 if (areq->src == areq->dst) { 1002 sg_count = sg_to_link_tbl(areq->dst, sg_count, cryptlen,
979 memcpy(link_tbl_ptr, &edesc->link_tbl[0], 1003 link_tbl_ptr);
980 edesc->src_nents * sizeof(struct talitos_ptr)); 1004
981 } else {
982 sg_count = sg_to_link_tbl(areq->dst, sg_count, cryptlen,
983 link_tbl_ptr);
984 }
985 /* Add an entry to the link table for ICV data */ 1005 /* Add an entry to the link table for ICV data */
986 link_tbl_ptr += sg_count - 1; 1006 link_tbl_ptr += sg_count - 1;
987 link_tbl_ptr->j_extent = 0; 1007 link_tbl_ptr->j_extent = 0;
@@ -1106,11 +1126,14 @@ static int aead_authenc_encrypt(struct aead_request *req)
1106 return ipsec_esp(edesc, req, NULL, 0, ipsec_esp_encrypt_done); 1126 return ipsec_esp(edesc, req, NULL, 0, ipsec_esp_encrypt_done);
1107} 1127}
1108 1128
1129
1130
1109static int aead_authenc_decrypt(struct aead_request *req) 1131static int aead_authenc_decrypt(struct aead_request *req)
1110{ 1132{
1111 struct crypto_aead *authenc = crypto_aead_reqtfm(req); 1133 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
1112 struct talitos_ctx *ctx = crypto_aead_ctx(authenc); 1134 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
1113 unsigned int authsize = ctx->authsize; 1135 unsigned int authsize = ctx->authsize;
1136 struct talitos_private *priv = dev_get_drvdata(ctx->dev);
1114 struct ipsec_esp_edesc *edesc; 1137 struct ipsec_esp_edesc *edesc;
1115 struct scatterlist *sg; 1138 struct scatterlist *sg;
1116 void *icvdata; 1139 void *icvdata;
@@ -1122,22 +1145,39 @@ static int aead_authenc_decrypt(struct aead_request *req)
1122 if (IS_ERR(edesc)) 1145 if (IS_ERR(edesc))
1123 return PTR_ERR(edesc); 1146 return PTR_ERR(edesc);
1124 1147
1125 /* stash incoming ICV for later cmp with ICV generated by the h/w */ 1148 if ((priv->features & TALITOS_FTR_HW_AUTH_CHECK) &&
1126 if (edesc->dma_len) 1149 (((!edesc->src_nents && !edesc->dst_nents) ||
1127 icvdata = &edesc->link_tbl[edesc->src_nents + 1150 priv->features & TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT))) {
1128 edesc->dst_nents + 2]; 1151
1129 else 1152 /* decrypt and check the ICV */
1130 icvdata = &edesc->link_tbl[0]; 1153 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND |
1154 DESC_HDR_MODE1_MDEU_CICV;
1155
1156 /* reset integrity check result bits */
1157 edesc->desc.hdr_lo = 0;
1158
1159 return ipsec_esp(edesc, req, NULL, 0, ipsec_esp_decrypt_hwauth_done);
1160
1161 } else {
1162
1163 /* Have to check the ICV with software */
1131 1164
1132 sg = sg_last(req->src, edesc->src_nents ? : 1); 1165 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
1166
1167 /* stash incoming ICV for later cmp with ICV generated by the h/w */
1168 if (edesc->dma_len)
1169 icvdata = &edesc->link_tbl[edesc->src_nents +
1170 edesc->dst_nents + 2];
1171 else
1172 icvdata = &edesc->link_tbl[0];
1133 1173
1134 memcpy(icvdata, (char *)sg_virt(sg) + sg->length - ctx->authsize, 1174 sg = sg_last(req->src, edesc->src_nents ? : 1);
1135 ctx->authsize);
1136 1175
1137 /* decrypt */ 1176 memcpy(icvdata, (char *)sg_virt(sg) + sg->length - ctx->authsize,
1138 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND; 1177 ctx->authsize);
1139 1178
1140 return ipsec_esp(edesc, req, NULL, 0, ipsec_esp_decrypt_done); 1179 return ipsec_esp(edesc, req, NULL, 0, ipsec_esp_decrypt_swauth_done);
1180 }
1141} 1181}
1142 1182
1143static int aead_authenc_givencrypt( 1183static int aead_authenc_givencrypt(
@@ -1391,7 +1431,6 @@ static int talitos_remove(struct of_device *ofdev)
1391 } 1431 }
1392 1432
1393 tasklet_kill(&priv->done_task); 1433 tasklet_kill(&priv->done_task);
1394 tasklet_kill(&priv->error_task);
1395 1434
1396 iounmap(priv->reg); 1435 iounmap(priv->reg);
1397 1436
@@ -1451,10 +1490,9 @@ static int talitos_probe(struct of_device *ofdev,
1451 1490
1452 priv->ofdev = ofdev; 1491 priv->ofdev = ofdev;
1453 1492
1454 INIT_LIST_HEAD(&priv->alg_list);
1455
1456 tasklet_init(&priv->done_task, talitos_done, (unsigned long)dev); 1493 tasklet_init(&priv->done_task, talitos_done, (unsigned long)dev);
1457 tasklet_init(&priv->error_task, talitos_error, (unsigned long)dev); 1494
1495 INIT_LIST_HEAD(&priv->alg_list);
1458 1496
1459 priv->irq = irq_of_parse_and_map(np, 0); 1497 priv->irq = irq_of_parse_and_map(np, 0);
1460 1498
@@ -1508,6 +1546,9 @@ static int talitos_probe(struct of_device *ofdev,
1508 if (of_device_is_compatible(np, "fsl,sec3.0")) 1546 if (of_device_is_compatible(np, "fsl,sec3.0"))
1509 priv->features |= TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT; 1547 priv->features |= TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT;
1510 1548
1549 if (of_device_is_compatible(np, "fsl,sec2.1"))
1550 priv->features |= TALITOS_FTR_HW_AUTH_CHECK;
1551
1511 priv->head_lock = kmalloc(sizeof(spinlock_t) * priv->num_channels, 1552 priv->head_lock = kmalloc(sizeof(spinlock_t) * priv->num_channels,
1512 GFP_KERNEL); 1553 GFP_KERNEL);
1513 priv->tail_lock = kmalloc(sizeof(spinlock_t) * priv->num_channels, 1554 priv->tail_lock = kmalloc(sizeof(spinlock_t) * priv->num_channels,
@@ -1551,7 +1592,7 @@ static int talitos_probe(struct of_device *ofdev,
1551 goto err_out; 1592 goto err_out;
1552 } 1593 }
1553 for (i = 0; i < priv->num_channels; i++) 1594 for (i = 0; i < priv->num_channels; i++)
1554 atomic_set(&priv->submit_count[i], -priv->chfifo_len); 1595 atomic_set(&priv->submit_count[i], -(priv->chfifo_len - 1));
1555 1596
1556 priv->head = kzalloc(sizeof(int) * priv->num_channels, GFP_KERNEL); 1597 priv->head = kzalloc(sizeof(int) * priv->num_channels, GFP_KERNEL);
1557 priv->tail = kzalloc(sizeof(int) * priv->num_channels, GFP_KERNEL); 1598 priv->tail = kzalloc(sizeof(int) * priv->num_channels, GFP_KERNEL);
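Note: the decrypt path above now dispatches to ipsec_esp_decrypt_hwauth_done() whenever the SEC can verify the ICV itself (TALITOS_FTR_HW_AUTH_CHECK), and falls back to the software comparison via ipsec_esp_decrypt_swauth_done() otherwise. The body of the hardware-auth callback is not part of the hunks shown here; the following is only a rough sketch, assuming it inspects the ICCR1 writeback bits added to talitos.h below and reuses the driver's existing unmap helper.

/* Sketch only, not the literal body from this patch: hardware-auth
 * completion callback checking the ICCR1 status written back by the MDEU.
 * struct ipsec_esp_edesc and ipsec_esp_unmap() are the driver's own
 * (assumed, not shown in these hunks). */
static void ipsec_esp_decrypt_hwauth_done(struct device *dev,
					  struct talitos_desc *desc,
					  void *context, int err)
{
	struct aead_request *areq = context;
	struct ipsec_esp_edesc *edesc =
		container_of(desc, struct ipsec_esp_edesc, desc);

	/* the MDEU writes its integrity-check verdict into hdr_lo */
	if (!err &&
	    (desc->hdr_lo & DESC_HDR_LO_ICCR1_MASK) != DESC_HDR_LO_ICCR1_PASS)
		err = -EBADMSG;

	ipsec_esp_unmap(dev, edesc, areq);	/* assumed existing helper */
	kfree(edesc);

	aead_request_complete(areq, err);
}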
diff --git a/drivers/crypto/talitos.h b/drivers/crypto/talitos.h
index c48a405abf70..575981f0cfda 100644
--- a/drivers/crypto/talitos.h
+++ b/drivers/crypto/talitos.h
@@ -37,7 +37,8 @@
37#define TALITOS_MCR_LO 0x1038 37#define TALITOS_MCR_LO 0x1038
38#define TALITOS_MCR_SWR 0x1 /* s/w reset */ 38#define TALITOS_MCR_SWR 0x1 /* s/w reset */
39#define TALITOS_IMR 0x1008 /* interrupt mask register */ 39#define TALITOS_IMR 0x1008 /* interrupt mask register */
40#define TALITOS_IMR_INIT 0x10fff /* enable channel IRQs */ 40#define TALITOS_IMR_INIT 0x100ff /* enable channel IRQs */
41#define TALITOS_IMR_DONE 0x00055 /* done IRQs */
41#define TALITOS_IMR_LO 0x100C 42#define TALITOS_IMR_LO 0x100C
42#define TALITOS_IMR_LO_INIT 0x20000 /* allow RNGU error IRQs */ 43#define TALITOS_IMR_LO_INIT 0x20000 /* allow RNGU error IRQs */
43#define TALITOS_ISR 0x1010 /* interrupt status register */ 44#define TALITOS_ISR 0x1010 /* interrupt status register */
@@ -55,6 +56,7 @@
55#define TALITOS_CCCR_CONT 0x2 /* channel continue */ 56#define TALITOS_CCCR_CONT 0x2 /* channel continue */
56#define TALITOS_CCCR_RESET 0x1 /* channel reset */ 57#define TALITOS_CCCR_RESET 0x1 /* channel reset */
57#define TALITOS_CCCR_LO(ch) (ch * TALITOS_CH_STRIDE + 0x110c) 58#define TALITOS_CCCR_LO(ch) (ch * TALITOS_CH_STRIDE + 0x110c)
59#define TALITOS_CCCR_LO_IWSE 0x80 /* chan. ICCR writeback enab. */
58#define TALITOS_CCCR_LO_CDWE 0x10 /* chan. done writeback enab. */ 60#define TALITOS_CCCR_LO_CDWE 0x10 /* chan. done writeback enab. */
59#define TALITOS_CCCR_LO_NT 0x4 /* notification type */ 61#define TALITOS_CCCR_LO_NT 0x4 /* notification type */
60#define TALITOS_CCCR_LO_CDIE 0x2 /* channel done IRQ enable */ 62#define TALITOS_CCCR_LO_CDIE 0x2 /* channel done IRQ enable */
@@ -102,6 +104,9 @@
102#define TALITOS_AESUISR_LO 0x4034 104#define TALITOS_AESUISR_LO 0x4034
103#define TALITOS_MDEUISR 0x6030 /* message digest unit */ 105#define TALITOS_MDEUISR 0x6030 /* message digest unit */
104#define TALITOS_MDEUISR_LO 0x6034 106#define TALITOS_MDEUISR_LO 0x6034
107#define TALITOS_MDEUICR 0x6038 /* interrupt control */
108#define TALITOS_MDEUICR_LO 0x603c
109#define TALITOS_MDEUICR_LO_ICE 0x4000 /* integrity check IRQ enable */
105#define TALITOS_AFEUISR 0x8030 /* arc4 unit */ 110#define TALITOS_AFEUISR 0x8030 /* arc4 unit */
106#define TALITOS_AFEUISR_LO 0x8034 111#define TALITOS_AFEUISR_LO 0x8034
107#define TALITOS_RNGUISR 0xa030 /* random number unit */ 112#define TALITOS_RNGUISR 0xa030 /* random number unit */
@@ -129,31 +134,34 @@
129 */ 134 */
130 135
131/* written back when done */ 136/* written back when done */
132#define DESC_HDR_DONE __constant_cpu_to_be32(0xff000000) 137#define DESC_HDR_DONE cpu_to_be32(0xff000000)
138#define DESC_HDR_LO_ICCR1_MASK cpu_to_be32(0x00180000)
139#define DESC_HDR_LO_ICCR1_PASS cpu_to_be32(0x00080000)
140#define DESC_HDR_LO_ICCR1_FAIL cpu_to_be32(0x00100000)
133 141
134/* primary execution unit select */ 142/* primary execution unit select */
135#define DESC_HDR_SEL0_MASK __constant_cpu_to_be32(0xf0000000) 143#define DESC_HDR_SEL0_MASK cpu_to_be32(0xf0000000)
136#define DESC_HDR_SEL0_AFEU __constant_cpu_to_be32(0x10000000) 144#define DESC_HDR_SEL0_AFEU cpu_to_be32(0x10000000)
137#define DESC_HDR_SEL0_DEU __constant_cpu_to_be32(0x20000000) 145#define DESC_HDR_SEL0_DEU cpu_to_be32(0x20000000)
138#define DESC_HDR_SEL0_MDEUA __constant_cpu_to_be32(0x30000000) 146#define DESC_HDR_SEL0_MDEUA cpu_to_be32(0x30000000)
139#define DESC_HDR_SEL0_MDEUB __constant_cpu_to_be32(0xb0000000) 147#define DESC_HDR_SEL0_MDEUB cpu_to_be32(0xb0000000)
140#define DESC_HDR_SEL0_RNG __constant_cpu_to_be32(0x40000000) 148#define DESC_HDR_SEL0_RNG cpu_to_be32(0x40000000)
141#define DESC_HDR_SEL0_PKEU __constant_cpu_to_be32(0x50000000) 149#define DESC_HDR_SEL0_PKEU cpu_to_be32(0x50000000)
142#define DESC_HDR_SEL0_AESU __constant_cpu_to_be32(0x60000000) 150#define DESC_HDR_SEL0_AESU cpu_to_be32(0x60000000)
143#define DESC_HDR_SEL0_KEU __constant_cpu_to_be32(0x70000000) 151#define DESC_HDR_SEL0_KEU cpu_to_be32(0x70000000)
144#define DESC_HDR_SEL0_CRCU __constant_cpu_to_be32(0x80000000) 152#define DESC_HDR_SEL0_CRCU cpu_to_be32(0x80000000)
145 153
146/* primary execution unit mode (MODE0) and derivatives */ 154/* primary execution unit mode (MODE0) and derivatives */
147#define DESC_HDR_MODE0_ENCRYPT __constant_cpu_to_be32(0x00100000) 155#define DESC_HDR_MODE0_ENCRYPT cpu_to_be32(0x00100000)
148#define DESC_HDR_MODE0_AESU_CBC __constant_cpu_to_be32(0x00200000) 156#define DESC_HDR_MODE0_AESU_CBC cpu_to_be32(0x00200000)
149#define DESC_HDR_MODE0_DEU_CBC __constant_cpu_to_be32(0x00400000) 157#define DESC_HDR_MODE0_DEU_CBC cpu_to_be32(0x00400000)
150#define DESC_HDR_MODE0_DEU_3DES __constant_cpu_to_be32(0x00200000) 158#define DESC_HDR_MODE0_DEU_3DES cpu_to_be32(0x00200000)
151#define DESC_HDR_MODE0_MDEU_INIT __constant_cpu_to_be32(0x01000000) 159#define DESC_HDR_MODE0_MDEU_INIT cpu_to_be32(0x01000000)
152#define DESC_HDR_MODE0_MDEU_HMAC __constant_cpu_to_be32(0x00800000) 160#define DESC_HDR_MODE0_MDEU_HMAC cpu_to_be32(0x00800000)
153#define DESC_HDR_MODE0_MDEU_PAD __constant_cpu_to_be32(0x00400000) 161#define DESC_HDR_MODE0_MDEU_PAD cpu_to_be32(0x00400000)
154#define DESC_HDR_MODE0_MDEU_MD5 __constant_cpu_to_be32(0x00200000) 162#define DESC_HDR_MODE0_MDEU_MD5 cpu_to_be32(0x00200000)
155#define DESC_HDR_MODE0_MDEU_SHA256 __constant_cpu_to_be32(0x00100000) 163#define DESC_HDR_MODE0_MDEU_SHA256 cpu_to_be32(0x00100000)
156#define DESC_HDR_MODE0_MDEU_SHA1 __constant_cpu_to_be32(0x00000000) 164#define DESC_HDR_MODE0_MDEU_SHA1 cpu_to_be32(0x00000000)
157#define DESC_HDR_MODE0_MDEU_MD5_HMAC (DESC_HDR_MODE0_MDEU_MD5 | \ 165#define DESC_HDR_MODE0_MDEU_MD5_HMAC (DESC_HDR_MODE0_MDEU_MD5 | \
158 DESC_HDR_MODE0_MDEU_HMAC) 166 DESC_HDR_MODE0_MDEU_HMAC)
159#define DESC_HDR_MODE0_MDEU_SHA256_HMAC (DESC_HDR_MODE0_MDEU_SHA256 | \ 167#define DESC_HDR_MODE0_MDEU_SHA256_HMAC (DESC_HDR_MODE0_MDEU_SHA256 | \
@@ -162,18 +170,19 @@
162 DESC_HDR_MODE0_MDEU_HMAC) 170 DESC_HDR_MODE0_MDEU_HMAC)
163 171
164/* secondary execution unit select (SEL1) */ 172/* secondary execution unit select (SEL1) */
165#define DESC_HDR_SEL1_MASK __constant_cpu_to_be32(0x000f0000) 173#define DESC_HDR_SEL1_MASK cpu_to_be32(0x000f0000)
166#define DESC_HDR_SEL1_MDEUA __constant_cpu_to_be32(0x00030000) 174#define DESC_HDR_SEL1_MDEUA cpu_to_be32(0x00030000)
167#define DESC_HDR_SEL1_MDEUB __constant_cpu_to_be32(0x000b0000) 175#define DESC_HDR_SEL1_MDEUB cpu_to_be32(0x000b0000)
168#define DESC_HDR_SEL1_CRCU __constant_cpu_to_be32(0x00080000) 176#define DESC_HDR_SEL1_CRCU cpu_to_be32(0x00080000)
169 177
170/* secondary execution unit mode (MODE1) and derivatives */ 178/* secondary execution unit mode (MODE1) and derivatives */
171#define DESC_HDR_MODE1_MDEU_INIT __constant_cpu_to_be32(0x00001000) 179#define DESC_HDR_MODE1_MDEU_CICV cpu_to_be32(0x00004000)
172#define DESC_HDR_MODE1_MDEU_HMAC __constant_cpu_to_be32(0x00000800) 180#define DESC_HDR_MODE1_MDEU_INIT cpu_to_be32(0x00001000)
173#define DESC_HDR_MODE1_MDEU_PAD __constant_cpu_to_be32(0x00000400) 181#define DESC_HDR_MODE1_MDEU_HMAC cpu_to_be32(0x00000800)
174#define DESC_HDR_MODE1_MDEU_MD5 __constant_cpu_to_be32(0x00000200) 182#define DESC_HDR_MODE1_MDEU_PAD cpu_to_be32(0x00000400)
175#define DESC_HDR_MODE1_MDEU_SHA256 __constant_cpu_to_be32(0x00000100) 183#define DESC_HDR_MODE1_MDEU_MD5 cpu_to_be32(0x00000200)
176#define DESC_HDR_MODE1_MDEU_SHA1 __constant_cpu_to_be32(0x00000000) 184#define DESC_HDR_MODE1_MDEU_SHA256 cpu_to_be32(0x00000100)
185#define DESC_HDR_MODE1_MDEU_SHA1 cpu_to_be32(0x00000000)
177#define DESC_HDR_MODE1_MDEU_MD5_HMAC (DESC_HDR_MODE1_MDEU_MD5 | \ 186#define DESC_HDR_MODE1_MDEU_MD5_HMAC (DESC_HDR_MODE1_MDEU_MD5 | \
178 DESC_HDR_MODE1_MDEU_HMAC) 187 DESC_HDR_MODE1_MDEU_HMAC)
179#define DESC_HDR_MODE1_MDEU_SHA256_HMAC (DESC_HDR_MODE1_MDEU_SHA256 | \ 188#define DESC_HDR_MODE1_MDEU_SHA256_HMAC (DESC_HDR_MODE1_MDEU_SHA256 | \
@@ -182,16 +191,16 @@
182 DESC_HDR_MODE1_MDEU_HMAC) 191 DESC_HDR_MODE1_MDEU_HMAC)
183 192
184/* direction of overall data flow (DIR) */ 193/* direction of overall data flow (DIR) */
185#define DESC_HDR_DIR_INBOUND __constant_cpu_to_be32(0x00000002) 194#define DESC_HDR_DIR_INBOUND cpu_to_be32(0x00000002)
186 195
187/* request done notification (DN) */ 196/* request done notification (DN) */
188#define DESC_HDR_DONE_NOTIFY __constant_cpu_to_be32(0x00000001) 197#define DESC_HDR_DONE_NOTIFY cpu_to_be32(0x00000001)
189 198
190/* descriptor types */ 199/* descriptor types */
191#define DESC_HDR_TYPE_AESU_CTR_NONSNOOP __constant_cpu_to_be32(0 << 3) 200#define DESC_HDR_TYPE_AESU_CTR_NONSNOOP cpu_to_be32(0 << 3)
192#define DESC_HDR_TYPE_IPSEC_ESP __constant_cpu_to_be32(1 << 3) 201#define DESC_HDR_TYPE_IPSEC_ESP cpu_to_be32(1 << 3)
193#define DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU __constant_cpu_to_be32(2 << 3) 202#define DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU cpu_to_be32(2 << 3)
194#define DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU __constant_cpu_to_be32(4 << 3) 203#define DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU cpu_to_be32(4 << 3)
195 204
196/* link table extent field bits */ 205/* link table extent field bits */
197#define DESC_PTR_LNKTBL_JUMP 0x80 206#define DESC_PTR_LNKTBL_JUMP 0x80
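Note: the SEL0/MODE0/SEL1/MODE1/TYPE constants above are OR-ed together into a per-algorithm descriptor header template, and DESC_HDR_MODE1_MDEU_CICV is the new bit that asks the MDEU to compare the computed ICV against the received one. A small illustrative composition follows; the particular AES-CBC plus HMAC-SHA256 mix is an example, not a template copied from the driver, and it assumes the definitions above are in scope.

/* Illustrative only; the driver's real templates live in its algorithm table. */
static __be32 example_inbound_hdr(void)
{
	/* AES-CBC on the AESU, HMAC-SHA256 on MDEU-A, IPsec ESP descriptor type */
	__be32 tmpl = DESC_HDR_TYPE_IPSEC_ESP |
		      DESC_HDR_SEL0_AESU | DESC_HDR_MODE0_AESU_CBC |
		      DESC_HDR_SEL1_MDEUA |
		      DESC_HDR_MODE1_MDEU_INIT | DESC_HDR_MODE1_MDEU_PAD |
		      DESC_HDR_MODE1_MDEU_SHA256_HMAC;

	/* inbound decrypt with the MDEU checking the ICV itself
	 * (only meaningful on TALITOS_FTR_HW_AUTH_CHECK hardware) */
	return tmpl | DESC_HDR_DIR_INBOUND | DESC_HDR_MODE1_MDEU_CICV;
}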
diff --git a/include/crypto/aes.h b/include/crypto/aes.h
index 40008d67ee3d..656a4c66a568 100644
--- a/include/crypto/aes.h
+++ b/include/crypto/aes.h
@@ -23,10 +23,10 @@ struct crypto_aes_ctx {
23 u32 key_dec[AES_MAX_KEYLENGTH_U32]; 23 u32 key_dec[AES_MAX_KEYLENGTH_U32];
24}; 24};
25 25
26extern u32 crypto_ft_tab[4][256]; 26extern const u32 crypto_ft_tab[4][256];
27extern u32 crypto_fl_tab[4][256]; 27extern const u32 crypto_fl_tab[4][256];
28extern u32 crypto_it_tab[4][256]; 28extern const u32 crypto_it_tab[4][256];
29extern u32 crypto_il_tab[4][256]; 29extern const u32 crypto_il_tab[4][256];
30 30
31int crypto_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key, 31int crypto_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
32 unsigned int key_len); 32 unsigned int key_len);
diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h
index 60d06e784be3..010545436efa 100644
--- a/include/crypto/algapi.h
+++ b/include/crypto/algapi.h
@@ -22,9 +22,18 @@ struct seq_file;
22 22
23struct crypto_type { 23struct crypto_type {
24 unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask); 24 unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
25 unsigned int (*extsize)(struct crypto_alg *alg,
26 const struct crypto_type *frontend);
25 int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask); 27 int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
26 void (*exit)(struct crypto_tfm *tfm); 28 int (*init_tfm)(struct crypto_tfm *tfm,
29 const struct crypto_type *frontend);
27 void (*show)(struct seq_file *m, struct crypto_alg *alg); 30 void (*show)(struct seq_file *m, struct crypto_alg *alg);
31 struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);
32
33 unsigned int type;
34 unsigned int maskclear;
35 unsigned int maskset;
36 unsigned int tfmsize;
28}; 37};
29 38
30struct crypto_instance { 39struct crypto_instance {
@@ -239,6 +248,11 @@ static inline struct crypto_hash *crypto_spawn_hash(struct crypto_spawn *spawn)
239 return __crypto_hash_cast(crypto_spawn_tfm(spawn, type, mask)); 248 return __crypto_hash_cast(crypto_spawn_tfm(spawn, type, mask));
240} 249}
241 250
251static inline void *crypto_hash_ctx(struct crypto_hash *tfm)
252{
253 return crypto_tfm_ctx(&tfm->base);
254}
255
242static inline void *crypto_hash_ctx_aligned(struct crypto_hash *tfm) 256static inline void *crypto_hash_ctx_aligned(struct crypto_hash *tfm)
243{ 257{
244 return crypto_tfm_ctx_aligned(&tfm->base); 258 return crypto_tfm_ctx_aligned(&tfm->base);
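Note: the new crypto_type fields let a transform frontend tell the core how big its wrapper object is (tfmsize), how much per-transform context to allocate behind it (extsize), how to initialise a freshly created tfm (init_tfm), and which type bits to clear and force (maskclear/maskset). Below is a rough, hypothetical sketch of a frontend description, loosely modelled on what a synchronous-hash frontend would need; the names, the mask comments and the field values are assumptions, not code from this patch.

#include <crypto/algapi.h>
#include <crypto/hash.h>	/* struct crypto_shash, declared further below */

/* Hypothetical frontend "myhash", shown only to illustrate the new fields. */
static unsigned int myhash_extsize(struct crypto_alg *alg,
				   const struct crypto_type *frontend)
{
	/* context to allocate beyond struct crypto_tfm itself (assumed) */
	return alg->cra_ctxsize;
}

static int myhash_init_tfm(struct crypto_tfm *tfm,
			   const struct crypto_type *frontend)
{
	/* a real frontend would hook up its operations here */
	return 0;
}

static const struct crypto_type myhash_type = {
	.extsize	= myhash_extsize,
	.init_tfm	= myhash_init_tfm,
	/* presumably the core clears the type nibble from the caller's
	 * type/mask and then forces this frontend's own .type bits */
	.maskclear	= ~CRYPTO_ALG_TYPE_MASK,
	.maskset	= CRYPTO_ALG_TYPE_MASK,
	.type		= CRYPTO_ALG_TYPE_SHASH,	/* see linux/crypto.h below */
	.tfmsize	= offsetof(struct crypto_shash, base),
};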
diff --git a/include/crypto/hash.h b/include/crypto/hash.h
index ee48ef8fb2ea..cd16d6e668ce 100644
--- a/include/crypto/hash.h
+++ b/include/crypto/hash.h
@@ -15,10 +15,40 @@
15 15
16#include <linux/crypto.h> 16#include <linux/crypto.h>
17 17
18struct shash_desc {
19 struct crypto_shash *tfm;
20 u32 flags;
21
22 void *__ctx[] CRYPTO_MINALIGN_ATTR;
23};
24
25struct shash_alg {
26 int (*init)(struct shash_desc *desc);
27 int (*reinit)(struct shash_desc *desc);
28 int (*update)(struct shash_desc *desc, const u8 *data,
29 unsigned int len);
30 int (*final)(struct shash_desc *desc, u8 *out);
31 int (*finup)(struct shash_desc *desc, const u8 *data,
32 unsigned int len, u8 *out);
33 int (*digest)(struct shash_desc *desc, const u8 *data,
34 unsigned int len, u8 *out);
35 int (*setkey)(struct crypto_shash *tfm, const u8 *key,
36 unsigned int keylen);
37
38 unsigned int descsize;
39 unsigned int digestsize;
40
41 struct crypto_alg base;
42};
43
18struct crypto_ahash { 44struct crypto_ahash {
19 struct crypto_tfm base; 45 struct crypto_tfm base;
20}; 46};
21 47
48struct crypto_shash {
49 struct crypto_tfm base;
50};
51
22static inline struct crypto_ahash *__crypto_ahash_cast(struct crypto_tfm *tfm) 52static inline struct crypto_ahash *__crypto_ahash_cast(struct crypto_tfm *tfm)
23{ 53{
24 return (struct crypto_ahash *)tfm; 54 return (struct crypto_ahash *)tfm;
@@ -87,6 +117,11 @@ static inline unsigned int crypto_ahash_reqsize(struct crypto_ahash *tfm)
87 return crypto_ahash_crt(tfm)->reqsize; 117 return crypto_ahash_crt(tfm)->reqsize;
88} 118}
89 119
120static inline void *ahash_request_ctx(struct ahash_request *req)
121{
122 return req->__ctx;
123}
124
90static inline int crypto_ahash_setkey(struct crypto_ahash *tfm, 125static inline int crypto_ahash_setkey(struct crypto_ahash *tfm,
91 const u8 *key, unsigned int keylen) 126 const u8 *key, unsigned int keylen)
92{ 127{
@@ -101,6 +136,14 @@ static inline int crypto_ahash_digest(struct ahash_request *req)
101 return crt->digest(req); 136 return crt->digest(req);
102} 137}
103 138
139static inline void crypto_ahash_export(struct ahash_request *req, u8 *out)
140{
141 memcpy(out, ahash_request_ctx(req),
142 crypto_ahash_reqsize(crypto_ahash_reqtfm(req)));
143}
144
145int crypto_ahash_import(struct ahash_request *req, const u8 *in);
146
104static inline int crypto_ahash_init(struct ahash_request *req) 147static inline int crypto_ahash_init(struct ahash_request *req)
105{ 148{
106 struct ahash_tfm *crt = crypto_ahash_crt(crypto_ahash_reqtfm(req)); 149 struct ahash_tfm *crt = crypto_ahash_crt(crypto_ahash_reqtfm(req));
@@ -169,4 +212,86 @@ static inline void ahash_request_set_crypt(struct ahash_request *req,
169 req->result = result; 212 req->result = result;
170} 213}
171 214
215struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
216 u32 mask);
217
218static inline struct crypto_tfm *crypto_shash_tfm(struct crypto_shash *tfm)
219{
220 return &tfm->base;
221}
222
223static inline void crypto_free_shash(struct crypto_shash *tfm)
224{
225 crypto_free_tfm(crypto_shash_tfm(tfm));
226}
227
228static inline unsigned int crypto_shash_alignmask(
229 struct crypto_shash *tfm)
230{
231 return crypto_tfm_alg_alignmask(crypto_shash_tfm(tfm));
232}
233
234static inline struct shash_alg *__crypto_shash_alg(struct crypto_alg *alg)
235{
236 return container_of(alg, struct shash_alg, base);
237}
238
239static inline struct shash_alg *crypto_shash_alg(struct crypto_shash *tfm)
240{
241 return __crypto_shash_alg(crypto_shash_tfm(tfm)->__crt_alg);
242}
243
244static inline unsigned int crypto_shash_digestsize(struct crypto_shash *tfm)
245{
246 return crypto_shash_alg(tfm)->digestsize;
247}
248
249static inline u32 crypto_shash_get_flags(struct crypto_shash *tfm)
250{
251 return crypto_tfm_get_flags(crypto_shash_tfm(tfm));
252}
253
254static inline void crypto_shash_set_flags(struct crypto_shash *tfm, u32 flags)
255{
256 crypto_tfm_set_flags(crypto_shash_tfm(tfm), flags);
257}
258
259static inline void crypto_shash_clear_flags(struct crypto_shash *tfm, u32 flags)
260{
261 crypto_tfm_clear_flags(crypto_shash_tfm(tfm), flags);
262}
263
264static inline unsigned int crypto_shash_descsize(struct crypto_shash *tfm)
265{
266 return crypto_shash_alg(tfm)->descsize;
267}
268
269static inline void *shash_desc_ctx(struct shash_desc *desc)
270{
271 return desc->__ctx;
272}
273
274int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
275 unsigned int keylen);
276int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
277 unsigned int len, u8 *out);
278
279static inline void crypto_shash_export(struct shash_desc *desc, u8 *out)
280{
281 memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
282}
283
284int crypto_shash_import(struct shash_desc *desc, const u8 *in);
285
286static inline int crypto_shash_init(struct shash_desc *desc)
287{
288 return crypto_shash_alg(desc->tfm)->init(desc);
289}
290
291int crypto_shash_update(struct shash_desc *desc, const u8 *data,
292 unsigned int len);
293int crypto_shash_final(struct shash_desc *desc, u8 *out);
294int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
295 unsigned int len, u8 *out);
296
172#endif /* _CRYPTO_HASH_H */ 297#endif /* _CRYPTO_HASH_H */
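Note: for callers, the new synchronous hash type works on linear buffers rather than scatterlists: allocate a crypto_shash, reserve crypto_shash_descsize() bytes of state behind a struct shash_desc, then use digest() or init()/update()/final(). A minimal usage sketch follows; the helper name and the "sha1" algorithm string are illustrative assumptions.

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/slab.h>

/* Illustrative one-shot digest over a linear buffer using the shash API. */
static int example_shash_digest(const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	int err;

	tfm = crypto_alloc_shash("sha1", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* per-request state (descsize bytes) lives right after the desc header */
	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc) {
		crypto_free_shash(tfm);
		return -ENOMEM;
	}
	desc->tfm = tfm;
	desc->flags = 0;

	/* 'out' must hold crypto_shash_digestsize(tfm) bytes */
	err = crypto_shash_digest(desc, data, len, out);

	kfree(desc);
	crypto_free_shash(tfm);
	return err;
}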
diff --git a/include/crypto/internal/hash.h b/include/crypto/internal/hash.h
index 917ae57bad4a..82b70564bcab 100644
--- a/include/crypto/internal/hash.h
+++ b/include/crypto/internal/hash.h
@@ -39,6 +39,12 @@ extern const struct crypto_type crypto_ahash_type;
39int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err); 39int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err);
40int crypto_hash_walk_first(struct ahash_request *req, 40int crypto_hash_walk_first(struct ahash_request *req,
41 struct crypto_hash_walk *walk); 41 struct crypto_hash_walk *walk);
42int crypto_hash_walk_first_compat(struct hash_desc *hdesc,
43 struct crypto_hash_walk *walk,
44 struct scatterlist *sg, unsigned int len);
45
46int crypto_register_shash(struct shash_alg *alg);
47int crypto_unregister_shash(struct shash_alg *alg);
42 48
43static inline void *crypto_ahash_ctx(struct crypto_ahash *tfm) 49static inline void *crypto_ahash_ctx(struct crypto_ahash *tfm)
44{ 50{
@@ -63,16 +69,16 @@ static inline struct ahash_request *ahash_dequeue_request(
63 return ahash_request_cast(crypto_dequeue_request(queue)); 69 return ahash_request_cast(crypto_dequeue_request(queue));
64} 70}
65 71
66static inline void *ahash_request_ctx(struct ahash_request *req)
67{
68 return req->__ctx;
69}
70
71static inline int ahash_tfm_in_queue(struct crypto_queue *queue, 72static inline int ahash_tfm_in_queue(struct crypto_queue *queue,
72 struct crypto_ahash *tfm) 73 struct crypto_ahash *tfm)
73{ 74{
74 return crypto_tfm_in_queue(queue, crypto_ahash_tfm(tfm)); 75 return crypto_tfm_in_queue(queue, crypto_ahash_tfm(tfm));
75} 76}
76 77
78static inline void *crypto_shash_ctx(struct crypto_shash *tfm)
79{
80 return crypto_tfm_ctx(&tfm->base);
81}
82
77#endif /* _CRYPTO_INTERNAL_HASH_H */ 83#endif /* _CRYPTO_INTERNAL_HASH_H */
78 84
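Note: on the provider side, crypto_register_shash() takes a struct shash_alg whose callbacks operate on linear buffers and keep descsize bytes of per-request state reachable via shash_desc_ctx(). A deliberately toy example follows, invented here purely for illustration (an "xsum" one-byte XOR checksum, not part of this patch).

#include <crypto/internal/hash.h>
#include <linux/module.h>

/* Hypothetical toy algorithm: XOR of all input bytes. Illustration only. */
struct xsum_ctx {
	u8 sum;
};

static int xsum_init(struct shash_desc *desc)
{
	struct xsum_ctx *ctx = shash_desc_ctx(desc);

	ctx->sum = 0;
	return 0;
}

static int xsum_update(struct shash_desc *desc, const u8 *data,
		       unsigned int len)
{
	struct xsum_ctx *ctx = shash_desc_ctx(desc);

	while (len--)
		ctx->sum ^= *data++;
	return 0;
}

static int xsum_final(struct shash_desc *desc, u8 *out)
{
	struct xsum_ctx *ctx = shash_desc_ctx(desc);

	*out = ctx->sum;
	return 0;
}

static struct shash_alg xsum_alg = {
	.init		= xsum_init,
	.update		= xsum_update,
	.final		= xsum_final,
	.descsize	= sizeof(struct xsum_ctx),
	.digestsize	= 1,
	.base		= {
		.cra_name	= "xsum",
		.cra_blocksize	= 1,
		.cra_module	= THIS_MODULE,
	},
};

static int __init xsum_mod_init(void)
{
	return crypto_register_shash(&xsum_alg);
}

static void __exit xsum_mod_fini(void)
{
	crypto_unregister_shash(&xsum_alg);
}

module_init(xsum_mod_init);
module_exit(xsum_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("toy XOR checksum, illustration only");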
diff --git a/include/linux/crc32c.h b/include/linux/crc32c.h
index 508f512e5a2f..bd8b44d96bdc 100644
--- a/include/linux/crc32c.h
+++ b/include/linux/crc32c.h
@@ -3,9 +3,9 @@
3 3
4#include <linux/types.h> 4#include <linux/types.h>
5 5
6extern u32 crc32c_le(u32 crc, unsigned char const *address, size_t length); 6extern u32 crc32c(u32 crc, const void *address, unsigned int length);
7extern u32 crc32c_be(u32 crc, unsigned char const *address, size_t length);
8 7
9#define crc32c(seed, data, length) crc32c_le(seed, (unsigned char const *)data, length) 8/* This macro exists for backwards-compatibility. */
9#define crc32c_le crc32c
10 10
11#endif /* _LINUX_CRC32C_H */ 11#endif /* _LINUX_CRC32C_H */
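Note: crc32c() is now the single entry point, backed by the crypto layer's "crc32c" shash (the library side follows below in lib/libcrc32c.c), and crc32c_le survives only as a compatibility alias. A minimal caller sketch, with an illustrative helper name:

#include <linux/crc32c.h>

/* Illustrative helper: checksum a linear buffer. */
static u32 example_crc32c(const void *buf, unsigned int len)
{
	/* seed with all ones, as many callers do;
	 * crc32c_le(...) still compiles via the compatibility macro */
	return crc32c(~(u32)0, buf, len);
}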
diff --git a/include/linux/crypto.h b/include/linux/crypto.h
index 3d2317e4af2e..3bacd71509fb 100644
--- a/include/linux/crypto.h
+++ b/include/linux/crypto.h
@@ -36,7 +36,8 @@
36#define CRYPTO_ALG_TYPE_ABLKCIPHER 0x00000005 36#define CRYPTO_ALG_TYPE_ABLKCIPHER 0x00000005
37#define CRYPTO_ALG_TYPE_GIVCIPHER 0x00000006 37#define CRYPTO_ALG_TYPE_GIVCIPHER 0x00000006
38#define CRYPTO_ALG_TYPE_DIGEST 0x00000008 38#define CRYPTO_ALG_TYPE_DIGEST 0x00000008
39#define CRYPTO_ALG_TYPE_HASH 0x00000009 39#define CRYPTO_ALG_TYPE_HASH 0x00000008
40#define CRYPTO_ALG_TYPE_SHASH 0x00000009
40#define CRYPTO_ALG_TYPE_AHASH 0x0000000a 41#define CRYPTO_ALG_TYPE_AHASH 0x0000000a
41#define CRYPTO_ALG_TYPE_RNG 0x0000000c 42#define CRYPTO_ALG_TYPE_RNG 0x0000000c
42 43
@@ -220,6 +221,7 @@ struct ablkcipher_alg {
220 221
221struct ahash_alg { 222struct ahash_alg {
222 int (*init)(struct ahash_request *req); 223 int (*init)(struct ahash_request *req);
224 int (*reinit)(struct ahash_request *req);
223 int (*update)(struct ahash_request *req); 225 int (*update)(struct ahash_request *req);
224 int (*final)(struct ahash_request *req); 226 int (*final)(struct ahash_request *req);
225 int (*digest)(struct ahash_request *req); 227 int (*digest)(struct ahash_request *req);
@@ -480,6 +482,8 @@ struct crypto_tfm {
480 struct compress_tfm compress; 482 struct compress_tfm compress;
481 struct rng_tfm rng; 483 struct rng_tfm rng;
482 } crt_u; 484 } crt_u;
485
486 void (*exit)(struct crypto_tfm *tfm);
483 487
484 struct crypto_alg *__crt_alg; 488 struct crypto_alg *__crt_alg;
485 489
@@ -544,7 +548,9 @@ struct crypto_attr_u32 {
544 * Transform user interface. 548 * Transform user interface.
545 */ 549 */
546 550
547struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags); 551struct crypto_tfm *crypto_alloc_tfm(const char *alg_name,
552 const struct crypto_type *frontend,
553 u32 type, u32 mask);
548struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask); 554struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
549void crypto_free_tfm(struct crypto_tfm *tfm); 555void crypto_free_tfm(struct crypto_tfm *tfm);
550 556
diff --git a/lib/Kconfig b/lib/Kconfig
index 85cf7ea978aa..fd4118e097f0 100644
--- a/lib/Kconfig
+++ b/lib/Kconfig
@@ -64,6 +64,8 @@ config CRC7
64 64
65config LIBCRC32C 65config LIBCRC32C
66 tristate "CRC32c (Castagnoli, et al) Cyclic Redundancy-Check" 66 tristate "CRC32c (Castagnoli, et al) Cyclic Redundancy-Check"
67 select CRYPTO
68 select CRYPTO_CRC32C
67 help 69 help
68 This option is provided for the case where no in-kernel-tree 70 This option is provided for the case where no in-kernel-tree
69 modules require CRC32c functions, but a module built outside the 71 modules require CRC32c functions, but a module built outside the
diff --git a/lib/libcrc32c.c b/lib/libcrc32c.c
index b5c3287d8ea4..244f5480c898 100644
--- a/lib/libcrc32c.c
+++ b/lib/libcrc32c.c
@@ -30,168 +30,52 @@
30 * any later version. 30 * any later version.
31 * 31 *
32 */ 32 */
33#include <linux/crc32c.h>
34#include <linux/compiler.h>
35#include <linux/module.h>
36
37MODULE_AUTHOR("Clay Haapala <chaapala@cisco.com>");
38MODULE_DESCRIPTION("CRC32c (Castagnoli) calculations");
39MODULE_LICENSE("GPL");
40 33
41#define CRC32C_POLY_BE 0x1EDC6F41 34#include <crypto/hash.h>
42#define CRC32C_POLY_LE 0x82F63B78 35#include <linux/err.h>
36#include <linux/init.h>
37#include <linux/kernel.h>
38#include <linux/module.h>
43 39
44#ifndef CRC_LE_BITS 40static struct crypto_shash *tfm;
45# define CRC_LE_BITS 8
46#endif
47 41
42u32 crc32c(u32 crc, const void *address, unsigned int length)
43{
44 struct {
45 struct shash_desc shash;
46 char ctx[crypto_shash_descsize(tfm)];
47 } desc;
48 int err;
48 49
49/* 50 desc.shash.tfm = tfm;
50 * Haven't generated a big-endian table yet, but the bit-wise version 51 desc.shash.flags = 0;
51 * should at least work. 52 *(u32 *)desc.ctx = crc;
52 */
53#if defined CRC_BE_BITS && CRC_BE_BITS != 1
54#undef CRC_BE_BITS
55#endif
56#ifndef CRC_BE_BITS
57# define CRC_BE_BITS 1
58#endif
59 53
60EXPORT_SYMBOL(crc32c_le); 54 err = crypto_shash_update(&desc.shash, address, length);
55 BUG_ON(err);
61 56
62#if CRC_LE_BITS == 1 57 return *(u32 *)desc.ctx;
63/*
64 * Compute things bit-wise, as done in crc32.c. We could share the tight
65 * loop below with crc32 and vary the POLY if we don't find value in terms
66 * of space and maintainability in keeping the two modules separate.
67 */
68u32 __pure
69crc32c_le(u32 crc, unsigned char const *p, size_t len)
70{
71 int i;
72 while (len--) {
73 crc ^= *p++;
74 for (i = 0; i < 8; i++)
75 crc = (crc >> 1) ^ ((crc & 1) ? CRC32C_POLY_LE : 0);
76 }
77 return crc;
78} 58}
79#else
80
81/*
82 * This is the CRC-32C table
83 * Generated with:
84 * width = 32 bits
85 * poly = 0x1EDC6F41
86 * reflect input bytes = true
87 * reflect output bytes = true
88 */
89
90static const u32 crc32c_table[256] = {
91 0x00000000L, 0xF26B8303L, 0xE13B70F7L, 0x1350F3F4L,
92 0xC79A971FL, 0x35F1141CL, 0x26A1E7E8L, 0xD4CA64EBL,
93 0x8AD958CFL, 0x78B2DBCCL, 0x6BE22838L, 0x9989AB3BL,
94 0x4D43CFD0L, 0xBF284CD3L, 0xAC78BF27L, 0x5E133C24L,
95 0x105EC76FL, 0xE235446CL, 0xF165B798L, 0x030E349BL,
96 0xD7C45070L, 0x25AFD373L, 0x36FF2087L, 0xC494A384L,
97 0x9A879FA0L, 0x68EC1CA3L, 0x7BBCEF57L, 0x89D76C54L,
98 0x5D1D08BFL, 0xAF768BBCL, 0xBC267848L, 0x4E4DFB4BL,
99 0x20BD8EDEL, 0xD2D60DDDL, 0xC186FE29L, 0x33ED7D2AL,
100 0xE72719C1L, 0x154C9AC2L, 0x061C6936L, 0xF477EA35L,
101 0xAA64D611L, 0x580F5512L, 0x4B5FA6E6L, 0xB93425E5L,
102 0x6DFE410EL, 0x9F95C20DL, 0x8CC531F9L, 0x7EAEB2FAL,
103 0x30E349B1L, 0xC288CAB2L, 0xD1D83946L, 0x23B3BA45L,
104 0xF779DEAEL, 0x05125DADL, 0x1642AE59L, 0xE4292D5AL,
105 0xBA3A117EL, 0x4851927DL, 0x5B016189L, 0xA96AE28AL,
106 0x7DA08661L, 0x8FCB0562L, 0x9C9BF696L, 0x6EF07595L,
107 0x417B1DBCL, 0xB3109EBFL, 0xA0406D4BL, 0x522BEE48L,
108 0x86E18AA3L, 0x748A09A0L, 0x67DAFA54L, 0x95B17957L,
109 0xCBA24573L, 0x39C9C670L, 0x2A993584L, 0xD8F2B687L,
110 0x0C38D26CL, 0xFE53516FL, 0xED03A29BL, 0x1F682198L,
111 0x5125DAD3L, 0xA34E59D0L, 0xB01EAA24L, 0x42752927L,
112 0x96BF4DCCL, 0x64D4CECFL, 0x77843D3BL, 0x85EFBE38L,
113 0xDBFC821CL, 0x2997011FL, 0x3AC7F2EBL, 0xC8AC71E8L,
114 0x1C661503L, 0xEE0D9600L, 0xFD5D65F4L, 0x0F36E6F7L,
115 0x61C69362L, 0x93AD1061L, 0x80FDE395L, 0x72966096L,
116 0xA65C047DL, 0x5437877EL, 0x4767748AL, 0xB50CF789L,
117 0xEB1FCBADL, 0x197448AEL, 0x0A24BB5AL, 0xF84F3859L,
118 0x2C855CB2L, 0xDEEEDFB1L, 0xCDBE2C45L, 0x3FD5AF46L,
119 0x7198540DL, 0x83F3D70EL, 0x90A324FAL, 0x62C8A7F9L,
120 0xB602C312L, 0x44694011L, 0x5739B3E5L, 0xA55230E6L,
121 0xFB410CC2L, 0x092A8FC1L, 0x1A7A7C35L, 0xE811FF36L,
122 0x3CDB9BDDL, 0xCEB018DEL, 0xDDE0EB2AL, 0x2F8B6829L,
123 0x82F63B78L, 0x709DB87BL, 0x63CD4B8FL, 0x91A6C88CL,
124 0x456CAC67L, 0xB7072F64L, 0xA457DC90L, 0x563C5F93L,
125 0x082F63B7L, 0xFA44E0B4L, 0xE9141340L, 0x1B7F9043L,
126 0xCFB5F4A8L, 0x3DDE77ABL, 0x2E8E845FL, 0xDCE5075CL,
127 0x92A8FC17L, 0x60C37F14L, 0x73938CE0L, 0x81F80FE3L,
128 0x55326B08L, 0xA759E80BL, 0xB4091BFFL, 0x466298FCL,
129 0x1871A4D8L, 0xEA1A27DBL, 0xF94AD42FL, 0x0B21572CL,
130 0xDFEB33C7L, 0x2D80B0C4L, 0x3ED04330L, 0xCCBBC033L,
131 0xA24BB5A6L, 0x502036A5L, 0x4370C551L, 0xB11B4652L,
132 0x65D122B9L, 0x97BAA1BAL, 0x84EA524EL, 0x7681D14DL,
133 0x2892ED69L, 0xDAF96E6AL, 0xC9A99D9EL, 0x3BC21E9DL,
134 0xEF087A76L, 0x1D63F975L, 0x0E330A81L, 0xFC588982L,
135 0xB21572C9L, 0x407EF1CAL, 0x532E023EL, 0xA145813DL,
136 0x758FE5D6L, 0x87E466D5L, 0x94B49521L, 0x66DF1622L,
137 0x38CC2A06L, 0xCAA7A905L, 0xD9F75AF1L, 0x2B9CD9F2L,
138 0xFF56BD19L, 0x0D3D3E1AL, 0x1E6DCDEEL, 0xEC064EEDL,
139 0xC38D26C4L, 0x31E6A5C7L, 0x22B65633L, 0xD0DDD530L,
140 0x0417B1DBL, 0xF67C32D8L, 0xE52CC12CL, 0x1747422FL,
141 0x49547E0BL, 0xBB3FFD08L, 0xA86F0EFCL, 0x5A048DFFL,
142 0x8ECEE914L, 0x7CA56A17L, 0x6FF599E3L, 0x9D9E1AE0L,
143 0xD3D3E1ABL, 0x21B862A8L, 0x32E8915CL, 0xC083125FL,
144 0x144976B4L, 0xE622F5B7L, 0xF5720643L, 0x07198540L,
145 0x590AB964L, 0xAB613A67L, 0xB831C993L, 0x4A5A4A90L,
146 0x9E902E7BL, 0x6CFBAD78L, 0x7FAB5E8CL, 0x8DC0DD8FL,
147 0xE330A81AL, 0x115B2B19L, 0x020BD8EDL, 0xF0605BEEL,
148 0x24AA3F05L, 0xD6C1BC06L, 0xC5914FF2L, 0x37FACCF1L,
149 0x69E9F0D5L, 0x9B8273D6L, 0x88D28022L, 0x7AB90321L,
150 0xAE7367CAL, 0x5C18E4C9L, 0x4F48173DL, 0xBD23943EL,
151 0xF36E6F75L, 0x0105EC76L, 0x12551F82L, 0xE03E9C81L,
152 0x34F4F86AL, 0xC69F7B69L, 0xD5CF889DL, 0x27A40B9EL,
153 0x79B737BAL, 0x8BDCB4B9L, 0x988C474DL, 0x6AE7C44EL,
154 0xBE2DA0A5L, 0x4C4623A6L, 0x5F16D052L, 0xAD7D5351L
155};
156 59
157/* 60EXPORT_SYMBOL(crc32c);
158 * Steps through buffer one byte at at time, calculates reflected
159 * crc using table.
160 */
161 61
162u32 __pure 62static int __init libcrc32c_mod_init(void)
163crc32c_le(u32 crc, unsigned char const *data, size_t length)
164{ 63{
165 while (length--) 64 tfm = crypto_alloc_shash("crc32c", 0, 0);
166 crc = 65 if (IS_ERR(tfm))
167 crc32c_table[(crc ^ *data++) & 0xFFL] ^ (crc >> 8); 66 return PTR_ERR(tfm);
168 67
169 return crc; 68 return 0;
170} 69}
171 70
172#endif /* CRC_LE_BITS == 8 */ 71static void __exit libcrc32c_mod_fini(void)
173
174EXPORT_SYMBOL(crc32c_be);
175
176#if CRC_BE_BITS == 1
177u32 __pure
178crc32c_be(u32 crc, unsigned char const *p, size_t len)
179{ 72{
180 int i; 73 crypto_free_shash(tfm);
181 while (len--) {
182 crc ^= *p++ << 24;
183 for (i = 0; i < 8; i++)
184 crc =
185 (crc << 1) ^ ((crc & 0x80000000) ? CRC32C_POLY_BE :
186 0);
187 }
188 return crc;
189} 74}
190#endif
191 75
192/* 76module_init(libcrc32c_mod_init);
193 * Unit test 77module_exit(libcrc32c_mod_fini);
194 * 78
195 * A small unit test suite is implemented as part of the crypto suite. 79MODULE_AUTHOR("Clay Haapala <chaapala@cisco.com>");
196 * Select CRYPTO_CRC32C and use the tcrypt module to run the tests. 80MODULE_DESCRIPTION("CRC32c (Castagnoli) calculations");
197 */ 81MODULE_LICENSE("GPL");