summaryrefslogtreecommitdiffstats
path: root/crypto
diff options
context:
space:
mode:
authorLinus Torvalds <torvalds@linux-foundation.org>2018-04-04 20:11:08 -0400
committerLinus Torvalds <torvalds@linux-foundation.org>2018-04-04 20:11:08 -0400
commit9eb31227cbccd3a37da0f42604f1ab5fc556bc53 (patch)
tree9aa467e620e002bf01cecdd98e3908e0cc3e7221 /crypto
parent527cd20771888443b5d8707debe98f62c7a1f596 (diff)
parentf444ec106407d600f17fa1a4bd14f84577401dec (diff)
Merge branch 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto updates from Herbert Xu: "API: - add AEAD support to crypto engine - allow batch registration in simd Algorithms: - add CFB mode - add speck block cipher - add sm4 block cipher - new test case for crct10dif - improve scheduling latency on ARM - scatter/gather support to gcm in aesni - convert x86 crypto algorithms to skcipher Drivers: - hmac(sha224/sha256) support in inside-secure - aes gcm/ccm support in stm32 - stm32mp1 support in stm32 - ccree driver from staging tree - gcm support over QI in caam - add ks-sa hwrng driver" * 'linus' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (212 commits) crypto: ccree - remove unused enums crypto: ahash - Fix early termination in hash walk crypto: brcm - explicitly cast cipher to hash type crypto: talitos - don't leak pointers to authenc keys crypto: qat - don't leak pointers to authenc keys crypto: picoxcell - don't leak pointers to authenc keys crypto: ixp4xx - don't leak pointers to authenc keys crypto: chelsio - don't leak pointers to authenc keys crypto: caam/qi - don't leak pointers to authenc keys crypto: caam - don't leak pointers to authenc keys crypto: lrw - Free rctx->ext with kzfree crypto: talitos - fix IPsec cipher in length crypto: Deduplicate le32_to_cpu_array() and cpu_to_le32_array() crypto: doc - clarify hash callbacks state machine crypto: api - Keep failed instances alive crypto: api - Make crypto_alg_lookup static crypto: api - Remove unused crypto_type lookup function crypto: chelsio - Remove declaration of static function from header crypto: inside-secure - hmac(sha224) support crypto: inside-secure - hmac(sha256) support ..
Diffstat (limited to 'crypto')
-rw-r--r--crypto/Kconfig129
-rw-r--r--crypto/Makefile4
-rw-r--r--crypto/ablk_helper.c150
-rw-r--r--crypto/ahash.c25
-rw-r--r--crypto/algapi.c8
-rw-r--r--crypto/api.c34
-rw-r--r--crypto/cfb.c353
-rw-r--r--crypto/crypto_engine.c301
-rw-r--r--crypto/crypto_user.c2
-rw-r--r--crypto/ecc.c23
-rw-r--r--crypto/ecdh.c23
-rw-r--r--crypto/internal.h1
-rw-r--r--crypto/lrw.c154
-rw-r--r--crypto/mcryptd.c34
-rw-r--r--crypto/md4.c17
-rw-r--r--crypto/md5.c17
-rw-r--r--crypto/rsa-pkcs1pad.c2
-rw-r--r--crypto/simd.c50
-rw-r--r--crypto/sm4_generic.c244
-rw-r--r--crypto/speck.c307
-rw-r--r--crypto/tcrypt.c3
-rw-r--r--crypto/testmgr.c45
-rw-r--r--crypto/testmgr.h1882
-rw-r--r--crypto/xts.c72
24 files changed, 3219 insertions, 661 deletions
diff --git a/crypto/Kconfig b/crypto/Kconfig
index b75264b09a46..c0dabed5122e 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -245,10 +245,6 @@ config CRYPTO_TEST
245 help 245 help
246 Quick & dirty crypto test module. 246 Quick & dirty crypto test module.
247 247
248config CRYPTO_ABLK_HELPER
249 tristate
250 select CRYPTO_CRYPTD
251
252config CRYPTO_SIMD 248config CRYPTO_SIMD
253 tristate 249 tristate
254 select CRYPTO_CRYPTD 250 select CRYPTO_CRYPTD
@@ -324,6 +320,14 @@ config CRYPTO_CBC
324 CBC: Cipher Block Chaining mode 320 CBC: Cipher Block Chaining mode
325 This block cipher algorithm is required for IPSec. 321 This block cipher algorithm is required for IPSec.
326 322
323config CRYPTO_CFB
324 tristate "CFB support"
325 select CRYPTO_BLKCIPHER
326 select CRYPTO_MANAGER
327 help
328 CFB: Cipher FeedBack mode
329 This block cipher algorithm is required for TPM2 Cryptography.
330
327config CRYPTO_CTR 331config CRYPTO_CTR
328 tristate "CTR support" 332 tristate "CTR support"
329 select CRYPTO_BLKCIPHER 333 select CRYPTO_BLKCIPHER
@@ -1114,7 +1118,7 @@ config CRYPTO_BLOWFISH_COMMON
1114config CRYPTO_BLOWFISH_X86_64 1118config CRYPTO_BLOWFISH_X86_64
1115 tristate "Blowfish cipher algorithm (x86_64)" 1119 tristate "Blowfish cipher algorithm (x86_64)"
1116 depends on X86 && 64BIT 1120 depends on X86 && 64BIT
1117 select CRYPTO_ALGAPI 1121 select CRYPTO_BLKCIPHER
1118 select CRYPTO_BLOWFISH_COMMON 1122 select CRYPTO_BLOWFISH_COMMON
1119 help 1123 help
1120 Blowfish cipher algorithm (x86_64), by Bruce Schneier. 1124 Blowfish cipher algorithm (x86_64), by Bruce Schneier.
@@ -1145,10 +1149,8 @@ config CRYPTO_CAMELLIA_X86_64
1145 tristate "Camellia cipher algorithm (x86_64)" 1149 tristate "Camellia cipher algorithm (x86_64)"
1146 depends on X86 && 64BIT 1150 depends on X86 && 64BIT
1147 depends on CRYPTO 1151 depends on CRYPTO
1148 select CRYPTO_ALGAPI 1152 select CRYPTO_BLKCIPHER
1149 select CRYPTO_GLUE_HELPER_X86 1153 select CRYPTO_GLUE_HELPER_X86
1150 select CRYPTO_LRW
1151 select CRYPTO_XTS
1152 help 1154 help
1153 Camellia cipher algorithm module (x86_64). 1155 Camellia cipher algorithm module (x86_64).
1154 1156
@@ -1164,12 +1166,10 @@ config CRYPTO_CAMELLIA_AESNI_AVX_X86_64
1164 tristate "Camellia cipher algorithm (x86_64/AES-NI/AVX)" 1166 tristate "Camellia cipher algorithm (x86_64/AES-NI/AVX)"
1165 depends on X86 && 64BIT 1167 depends on X86 && 64BIT
1166 depends on CRYPTO 1168 depends on CRYPTO
1167 select CRYPTO_ALGAPI 1169 select CRYPTO_BLKCIPHER
1168 select CRYPTO_CRYPTD
1169 select CRYPTO_ABLK_HELPER
1170 select CRYPTO_GLUE_HELPER_X86
1171 select CRYPTO_CAMELLIA_X86_64 1170 select CRYPTO_CAMELLIA_X86_64
1172 select CRYPTO_LRW 1171 select CRYPTO_GLUE_HELPER_X86
1172 select CRYPTO_SIMD
1173 select CRYPTO_XTS 1173 select CRYPTO_XTS
1174 help 1174 help
1175 Camellia cipher algorithm module (x86_64/AES-NI/AVX). 1175 Camellia cipher algorithm module (x86_64/AES-NI/AVX).
@@ -1186,14 +1186,7 @@ config CRYPTO_CAMELLIA_AESNI_AVX2_X86_64
1186 tristate "Camellia cipher algorithm (x86_64/AES-NI/AVX2)" 1186 tristate "Camellia cipher algorithm (x86_64/AES-NI/AVX2)"
1187 depends on X86 && 64BIT 1187 depends on X86 && 64BIT
1188 depends on CRYPTO 1188 depends on CRYPTO
1189 select CRYPTO_ALGAPI
1190 select CRYPTO_CRYPTD
1191 select CRYPTO_ABLK_HELPER
1192 select CRYPTO_GLUE_HELPER_X86
1193 select CRYPTO_CAMELLIA_X86_64
1194 select CRYPTO_CAMELLIA_AESNI_AVX_X86_64 1189 select CRYPTO_CAMELLIA_AESNI_AVX_X86_64
1195 select CRYPTO_LRW
1196 select CRYPTO_XTS
1197 help 1190 help
1198 Camellia cipher algorithm module (x86_64/AES-NI/AVX2). 1191 Camellia cipher algorithm module (x86_64/AES-NI/AVX2).
1199 1192
@@ -1238,11 +1231,10 @@ config CRYPTO_CAST5
1238config CRYPTO_CAST5_AVX_X86_64 1231config CRYPTO_CAST5_AVX_X86_64
1239 tristate "CAST5 (CAST-128) cipher algorithm (x86_64/AVX)" 1232 tristate "CAST5 (CAST-128) cipher algorithm (x86_64/AVX)"
1240 depends on X86 && 64BIT 1233 depends on X86 && 64BIT
1241 select CRYPTO_ALGAPI 1234 select CRYPTO_BLKCIPHER
1242 select CRYPTO_CRYPTD
1243 select CRYPTO_ABLK_HELPER
1244 select CRYPTO_CAST_COMMON
1245 select CRYPTO_CAST5 1235 select CRYPTO_CAST5
1236 select CRYPTO_CAST_COMMON
1237 select CRYPTO_SIMD
1246 help 1238 help
1247 The CAST5 encryption algorithm (synonymous with CAST-128) is 1239 The CAST5 encryption algorithm (synonymous with CAST-128) is
1248 described in RFC2144. 1240 described in RFC2144.
@@ -1261,13 +1253,11 @@ config CRYPTO_CAST6
1261config CRYPTO_CAST6_AVX_X86_64 1253config CRYPTO_CAST6_AVX_X86_64
1262 tristate "CAST6 (CAST-256) cipher algorithm (x86_64/AVX)" 1254 tristate "CAST6 (CAST-256) cipher algorithm (x86_64/AVX)"
1263 depends on X86 && 64BIT 1255 depends on X86 && 64BIT
1264 select CRYPTO_ALGAPI 1256 select CRYPTO_BLKCIPHER
1265 select CRYPTO_CRYPTD
1266 select CRYPTO_ABLK_HELPER
1267 select CRYPTO_GLUE_HELPER_X86
1268 select CRYPTO_CAST_COMMON
1269 select CRYPTO_CAST6 1257 select CRYPTO_CAST6
1270 select CRYPTO_LRW 1258 select CRYPTO_CAST_COMMON
1259 select CRYPTO_GLUE_HELPER_X86
1260 select CRYPTO_SIMD
1271 select CRYPTO_XTS 1261 select CRYPTO_XTS
1272 help 1262 help
1273 The CAST6 encryption algorithm (synonymous with CAST-256) is 1263 The CAST6 encryption algorithm (synonymous with CAST-256) is
@@ -1294,7 +1284,7 @@ config CRYPTO_DES_SPARC64
1294config CRYPTO_DES3_EDE_X86_64 1284config CRYPTO_DES3_EDE_X86_64
1295 tristate "Triple DES EDE cipher algorithm (x86-64)" 1285 tristate "Triple DES EDE cipher algorithm (x86-64)"
1296 depends on X86 && 64BIT 1286 depends on X86 && 64BIT
1297 select CRYPTO_ALGAPI 1287 select CRYPTO_BLKCIPHER
1298 select CRYPTO_DES 1288 select CRYPTO_DES
1299 help 1289 help
1300 Triple DES EDE (FIPS 46-3) algorithm. 1290 Triple DES EDE (FIPS 46-3) algorithm.
@@ -1422,13 +1412,10 @@ config CRYPTO_SERPENT
1422config CRYPTO_SERPENT_SSE2_X86_64 1412config CRYPTO_SERPENT_SSE2_X86_64
1423 tristate "Serpent cipher algorithm (x86_64/SSE2)" 1413 tristate "Serpent cipher algorithm (x86_64/SSE2)"
1424 depends on X86 && 64BIT 1414 depends on X86 && 64BIT
1425 select CRYPTO_ALGAPI 1415 select CRYPTO_BLKCIPHER
1426 select CRYPTO_CRYPTD
1427 select CRYPTO_ABLK_HELPER
1428 select CRYPTO_GLUE_HELPER_X86 1416 select CRYPTO_GLUE_HELPER_X86
1429 select CRYPTO_SERPENT 1417 select CRYPTO_SERPENT
1430 select CRYPTO_LRW 1418 select CRYPTO_SIMD
1431 select CRYPTO_XTS
1432 help 1419 help
1433 Serpent cipher algorithm, by Anderson, Biham & Knudsen. 1420 Serpent cipher algorithm, by Anderson, Biham & Knudsen.
1434 1421
@@ -1444,13 +1431,10 @@ config CRYPTO_SERPENT_SSE2_X86_64
1444config CRYPTO_SERPENT_SSE2_586 1431config CRYPTO_SERPENT_SSE2_586
1445 tristate "Serpent cipher algorithm (i586/SSE2)" 1432 tristate "Serpent cipher algorithm (i586/SSE2)"
1446 depends on X86 && !64BIT 1433 depends on X86 && !64BIT
1447 select CRYPTO_ALGAPI 1434 select CRYPTO_BLKCIPHER
1448 select CRYPTO_CRYPTD
1449 select CRYPTO_ABLK_HELPER
1450 select CRYPTO_GLUE_HELPER_X86 1435 select CRYPTO_GLUE_HELPER_X86
1451 select CRYPTO_SERPENT 1436 select CRYPTO_SERPENT
1452 select CRYPTO_LRW 1437 select CRYPTO_SIMD
1453 select CRYPTO_XTS
1454 help 1438 help
1455 Serpent cipher algorithm, by Anderson, Biham & Knudsen. 1439 Serpent cipher algorithm, by Anderson, Biham & Knudsen.
1456 1440
@@ -1466,12 +1450,10 @@ config CRYPTO_SERPENT_SSE2_586
1466config CRYPTO_SERPENT_AVX_X86_64 1450config CRYPTO_SERPENT_AVX_X86_64
1467 tristate "Serpent cipher algorithm (x86_64/AVX)" 1451 tristate "Serpent cipher algorithm (x86_64/AVX)"
1468 depends on X86 && 64BIT 1452 depends on X86 && 64BIT
1469 select CRYPTO_ALGAPI 1453 select CRYPTO_BLKCIPHER
1470 select CRYPTO_CRYPTD
1471 select CRYPTO_ABLK_HELPER
1472 select CRYPTO_GLUE_HELPER_X86 1454 select CRYPTO_GLUE_HELPER_X86
1473 select CRYPTO_SERPENT 1455 select CRYPTO_SERPENT
1474 select CRYPTO_LRW 1456 select CRYPTO_SIMD
1475 select CRYPTO_XTS 1457 select CRYPTO_XTS
1476 help 1458 help
1477 Serpent cipher algorithm, by Anderson, Biham & Knudsen. 1459 Serpent cipher algorithm, by Anderson, Biham & Knudsen.
@@ -1488,14 +1470,7 @@ config CRYPTO_SERPENT_AVX_X86_64
1488config CRYPTO_SERPENT_AVX2_X86_64 1470config CRYPTO_SERPENT_AVX2_X86_64
1489 tristate "Serpent cipher algorithm (x86_64/AVX2)" 1471 tristate "Serpent cipher algorithm (x86_64/AVX2)"
1490 depends on X86 && 64BIT 1472 depends on X86 && 64BIT
1491 select CRYPTO_ALGAPI
1492 select CRYPTO_CRYPTD
1493 select CRYPTO_ABLK_HELPER
1494 select CRYPTO_GLUE_HELPER_X86
1495 select CRYPTO_SERPENT
1496 select CRYPTO_SERPENT_AVX_X86_64 1473 select CRYPTO_SERPENT_AVX_X86_64
1497 select CRYPTO_LRW
1498 select CRYPTO_XTS
1499 help 1474 help
1500 Serpent cipher algorithm, by Anderson, Biham & Knudsen. 1475 Serpent cipher algorithm, by Anderson, Biham & Knudsen.
1501 1476
@@ -1508,6 +1483,45 @@ config CRYPTO_SERPENT_AVX2_X86_64
1508 See also: 1483 See also:
1509 <http://www.cl.cam.ac.uk/~rja14/serpent.html> 1484 <http://www.cl.cam.ac.uk/~rja14/serpent.html>
1510 1485
1486config CRYPTO_SM4
1487 tristate "SM4 cipher algorithm"
1488 select CRYPTO_ALGAPI
1489 help
1490 SM4 cipher algorithms (OSCCA GB/T 32907-2016).
1491
1492 SM4 (GBT.32907-2016) is a cryptographic standard issued by the
1493 Organization of State Commercial Administration of China (OSCCA)
 1494 as an authorized cryptographic algorithm for the use within China.
1495
1496 SMS4 was originally created for use in protecting wireless
1497 networks, and is mandated in the Chinese National Standard for
 1498 Wireless LAN WAPI (WLAN Authentication and Privacy Infrastructure)
1499 (GB.15629.11-2003).
1500
1501 The latest SM4 standard (GBT.32907-2016) was proposed by OSCCA and
1502 standardized through TC 260 of the Standardization Administration
1503 of the People's Republic of China (SAC).
1504
1505 The input, output, and key of SMS4 are each 128 bits.
1506
1507 See also: <https://eprint.iacr.org/2008/329.pdf>
1508
1509 If unsure, say N.
1510
1511config CRYPTO_SPECK
1512 tristate "Speck cipher algorithm"
1513 select CRYPTO_ALGAPI
1514 help
1515 Speck is a lightweight block cipher that is tuned for optimal
1516 performance in software (rather than hardware).
1517
1518 Speck may not be as secure as AES, and should only be used on systems
1519 where AES is not fast enough.
1520
1521 See also: <https://eprint.iacr.org/2013/404.pdf>
1522
1523 If unsure, say N.
1524
1511config CRYPTO_TEA 1525config CRYPTO_TEA
1512 tristate "TEA, XTEA and XETA cipher algorithms" 1526 tristate "TEA, XTEA and XETA cipher algorithms"
1513 select CRYPTO_ALGAPI 1527 select CRYPTO_ALGAPI
@@ -1581,12 +1595,10 @@ config CRYPTO_TWOFISH_X86_64
1581config CRYPTO_TWOFISH_X86_64_3WAY 1595config CRYPTO_TWOFISH_X86_64_3WAY
1582 tristate "Twofish cipher algorithm (x86_64, 3-way parallel)" 1596 tristate "Twofish cipher algorithm (x86_64, 3-way parallel)"
1583 depends on X86 && 64BIT 1597 depends on X86 && 64BIT
1584 select CRYPTO_ALGAPI 1598 select CRYPTO_BLKCIPHER
1585 select CRYPTO_TWOFISH_COMMON 1599 select CRYPTO_TWOFISH_COMMON
1586 select CRYPTO_TWOFISH_X86_64 1600 select CRYPTO_TWOFISH_X86_64
1587 select CRYPTO_GLUE_HELPER_X86 1601 select CRYPTO_GLUE_HELPER_X86
1588 select CRYPTO_LRW
1589 select CRYPTO_XTS
1590 help 1602 help
1591 Twofish cipher algorithm (x86_64, 3-way parallel). 1603 Twofish cipher algorithm (x86_64, 3-way parallel).
1592 1604
@@ -1604,15 +1616,12 @@ config CRYPTO_TWOFISH_X86_64_3WAY
1604config CRYPTO_TWOFISH_AVX_X86_64 1616config CRYPTO_TWOFISH_AVX_X86_64
1605 tristate "Twofish cipher algorithm (x86_64/AVX)" 1617 tristate "Twofish cipher algorithm (x86_64/AVX)"
1606 depends on X86 && 64BIT 1618 depends on X86 && 64BIT
1607 select CRYPTO_ALGAPI 1619 select CRYPTO_BLKCIPHER
1608 select CRYPTO_CRYPTD
1609 select CRYPTO_ABLK_HELPER
1610 select CRYPTO_GLUE_HELPER_X86 1620 select CRYPTO_GLUE_HELPER_X86
1621 select CRYPTO_SIMD
1611 select CRYPTO_TWOFISH_COMMON 1622 select CRYPTO_TWOFISH_COMMON
1612 select CRYPTO_TWOFISH_X86_64 1623 select CRYPTO_TWOFISH_X86_64
1613 select CRYPTO_TWOFISH_X86_64_3WAY 1624 select CRYPTO_TWOFISH_X86_64_3WAY
1614 select CRYPTO_LRW
1615 select CRYPTO_XTS
1616 help 1625 help
1617 Twofish cipher algorithm (x86_64/AVX). 1626 Twofish cipher algorithm (x86_64/AVX).
1618 1627
diff --git a/crypto/Makefile b/crypto/Makefile
index cdbc03b35510..4fc69fe94e6a 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -78,6 +78,7 @@ obj-$(CONFIG_CRYPTO_TGR192) += tgr192.o
78obj-$(CONFIG_CRYPTO_GF128MUL) += gf128mul.o 78obj-$(CONFIG_CRYPTO_GF128MUL) += gf128mul.o
79obj-$(CONFIG_CRYPTO_ECB) += ecb.o 79obj-$(CONFIG_CRYPTO_ECB) += ecb.o
80obj-$(CONFIG_CRYPTO_CBC) += cbc.o 80obj-$(CONFIG_CRYPTO_CBC) += cbc.o
81obj-$(CONFIG_CRYPTO_CFB) += cfb.o
81obj-$(CONFIG_CRYPTO_PCBC) += pcbc.o 82obj-$(CONFIG_CRYPTO_PCBC) += pcbc.o
82obj-$(CONFIG_CRYPTO_CTS) += cts.o 83obj-$(CONFIG_CRYPTO_CTS) += cts.o
83obj-$(CONFIG_CRYPTO_LRW) += lrw.o 84obj-$(CONFIG_CRYPTO_LRW) += lrw.o
@@ -100,6 +101,7 @@ obj-$(CONFIG_CRYPTO_SERPENT) += serpent_generic.o
100CFLAGS_serpent_generic.o := $(call cc-option,-fsched-pressure) # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=79149 101CFLAGS_serpent_generic.o := $(call cc-option,-fsched-pressure) # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=79149
101obj-$(CONFIG_CRYPTO_AES) += aes_generic.o 102obj-$(CONFIG_CRYPTO_AES) += aes_generic.o
102CFLAGS_aes_generic.o := $(call cc-option,-fno-code-hoisting) # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=83356 103CFLAGS_aes_generic.o := $(call cc-option,-fno-code-hoisting) # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=83356
104obj-$(CONFIG_CRYPTO_SM4) += sm4_generic.o
103obj-$(CONFIG_CRYPTO_AES_TI) += aes_ti.o 105obj-$(CONFIG_CRYPTO_AES_TI) += aes_ti.o
104obj-$(CONFIG_CRYPTO_CAMELLIA) += camellia_generic.o 106obj-$(CONFIG_CRYPTO_CAMELLIA) += camellia_generic.o
105obj-$(CONFIG_CRYPTO_CAST_COMMON) += cast_common.o 107obj-$(CONFIG_CRYPTO_CAST_COMMON) += cast_common.o
@@ -110,6 +112,7 @@ obj-$(CONFIG_CRYPTO_TEA) += tea.o
110obj-$(CONFIG_CRYPTO_KHAZAD) += khazad.o 112obj-$(CONFIG_CRYPTO_KHAZAD) += khazad.o
111obj-$(CONFIG_CRYPTO_ANUBIS) += anubis.o 113obj-$(CONFIG_CRYPTO_ANUBIS) += anubis.o
112obj-$(CONFIG_CRYPTO_SEED) += seed.o 114obj-$(CONFIG_CRYPTO_SEED) += seed.o
115obj-$(CONFIG_CRYPTO_SPECK) += speck.o
113obj-$(CONFIG_CRYPTO_SALSA20) += salsa20_generic.o 116obj-$(CONFIG_CRYPTO_SALSA20) += salsa20_generic.o
114obj-$(CONFIG_CRYPTO_CHACHA20) += chacha20_generic.o 117obj-$(CONFIG_CRYPTO_CHACHA20) += chacha20_generic.o
115obj-$(CONFIG_CRYPTO_POLY1305) += poly1305_generic.o 118obj-$(CONFIG_CRYPTO_POLY1305) += poly1305_generic.o
@@ -149,6 +152,5 @@ obj-$(CONFIG_XOR_BLOCKS) += xor.o
149obj-$(CONFIG_ASYNC_CORE) += async_tx/ 152obj-$(CONFIG_ASYNC_CORE) += async_tx/
150obj-$(CONFIG_ASYMMETRIC_KEY_TYPE) += asymmetric_keys/ 153obj-$(CONFIG_ASYMMETRIC_KEY_TYPE) += asymmetric_keys/
151obj-$(CONFIG_CRYPTO_HASH_INFO) += hash_info.o 154obj-$(CONFIG_CRYPTO_HASH_INFO) += hash_info.o
152obj-$(CONFIG_CRYPTO_ABLK_HELPER) += ablk_helper.o
153crypto_simd-y := simd.o 155crypto_simd-y := simd.o
154obj-$(CONFIG_CRYPTO_SIMD) += crypto_simd.o 156obj-$(CONFIG_CRYPTO_SIMD) += crypto_simd.o
diff --git a/crypto/ablk_helper.c b/crypto/ablk_helper.c
deleted file mode 100644
index 09776bb1360e..000000000000
--- a/crypto/ablk_helper.c
+++ /dev/null
@@ -1,150 +0,0 @@
1/*
2 * Shared async block cipher helpers
3 *
4 * Copyright (c) 2012 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
5 *
6 * Based on aesni-intel_glue.c by:
7 * Copyright (C) 2008, Intel Corp.
8 * Author: Huang Ying <ying.huang@intel.com>
9 *
10 * This program is free software; you can redistribute it and/or modify
11 * it under the terms of the GNU General Public License as published by
12 * the Free Software Foundation; either version 2 of the License, or
13 * (at your option) any later version.
14 *
15 * This program is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 * GNU General Public License for more details.
19 *
20 * You should have received a copy of the GNU General Public License
21 * along with this program. If not, see <http://www.gnu.org/licenses/>.
22 *
23 */
24
25#include <linux/kernel.h>
26#include <linux/crypto.h>
27#include <linux/init.h>
28#include <linux/module.h>
29#include <crypto/algapi.h>
30#include <crypto/cryptd.h>
31#include <crypto/ablk_helper.h>
32#include <asm/simd.h>
33
34int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
35 unsigned int key_len)
36{
37 struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
38 struct crypto_ablkcipher *child = &ctx->cryptd_tfm->base;
39 int err;
40
41 crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
42 crypto_ablkcipher_set_flags(child, crypto_ablkcipher_get_flags(tfm)
43 & CRYPTO_TFM_REQ_MASK);
44 err = crypto_ablkcipher_setkey(child, key, key_len);
45 crypto_ablkcipher_set_flags(tfm, crypto_ablkcipher_get_flags(child)
46 & CRYPTO_TFM_RES_MASK);
47 return err;
48}
49EXPORT_SYMBOL_GPL(ablk_set_key);
50
51int __ablk_encrypt(struct ablkcipher_request *req)
52{
53 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
54 struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
55 struct blkcipher_desc desc;
56
57 desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
58 desc.info = req->info;
59 desc.flags = 0;
60
61 return crypto_blkcipher_crt(desc.tfm)->encrypt(
62 &desc, req->dst, req->src, req->nbytes);
63}
64EXPORT_SYMBOL_GPL(__ablk_encrypt);
65
66int ablk_encrypt(struct ablkcipher_request *req)
67{
68 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
69 struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
70
71 if (!may_use_simd() ||
72 (in_atomic() && cryptd_ablkcipher_queued(ctx->cryptd_tfm))) {
73 struct ablkcipher_request *cryptd_req =
74 ablkcipher_request_ctx(req);
75
76 *cryptd_req = *req;
77 ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
78
79 return crypto_ablkcipher_encrypt(cryptd_req);
80 } else {
81 return __ablk_encrypt(req);
82 }
83}
84EXPORT_SYMBOL_GPL(ablk_encrypt);
85
86int ablk_decrypt(struct ablkcipher_request *req)
87{
88 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
89 struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm);
90
91 if (!may_use_simd() ||
92 (in_atomic() && cryptd_ablkcipher_queued(ctx->cryptd_tfm))) {
93 struct ablkcipher_request *cryptd_req =
94 ablkcipher_request_ctx(req);
95
96 *cryptd_req = *req;
97 ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
98
99 return crypto_ablkcipher_decrypt(cryptd_req);
100 } else {
101 struct blkcipher_desc desc;
102
103 desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
104 desc.info = req->info;
105 desc.flags = 0;
106
107 return crypto_blkcipher_crt(desc.tfm)->decrypt(
108 &desc, req->dst, req->src, req->nbytes);
109 }
110}
111EXPORT_SYMBOL_GPL(ablk_decrypt);
112
113void ablk_exit(struct crypto_tfm *tfm)
114{
115 struct async_helper_ctx *ctx = crypto_tfm_ctx(tfm);
116
117 cryptd_free_ablkcipher(ctx->cryptd_tfm);
118}
119EXPORT_SYMBOL_GPL(ablk_exit);
120
121int ablk_init_common(struct crypto_tfm *tfm, const char *drv_name)
122{
123 struct async_helper_ctx *ctx = crypto_tfm_ctx(tfm);
124 struct cryptd_ablkcipher *cryptd_tfm;
125
126 cryptd_tfm = cryptd_alloc_ablkcipher(drv_name, CRYPTO_ALG_INTERNAL,
127 CRYPTO_ALG_INTERNAL);
128 if (IS_ERR(cryptd_tfm))
129 return PTR_ERR(cryptd_tfm);
130
131 ctx->cryptd_tfm = cryptd_tfm;
132 tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
133 crypto_ablkcipher_reqsize(&cryptd_tfm->base);
134
135 return 0;
136}
137EXPORT_SYMBOL_GPL(ablk_init_common);
138
139int ablk_init(struct crypto_tfm *tfm)
140{
141 char drv_name[CRYPTO_MAX_ALG_NAME];
142
143 snprintf(drv_name, sizeof(drv_name), "__driver-%s",
144 crypto_tfm_alg_driver_name(tfm));
145
146 return ablk_init_common(tfm, drv_name);
147}
148EXPORT_SYMBOL_GPL(ablk_init);
149
150MODULE_LICENSE("GPL");
diff --git a/crypto/ahash.c b/crypto/ahash.c
index 266fc1d64f61..a64c143165b1 100644
--- a/crypto/ahash.c
+++ b/crypto/ahash.c
@@ -92,13 +92,14 @@ int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
92 92
93 if (nbytes && walk->offset & alignmask && !err) { 93 if (nbytes && walk->offset & alignmask && !err) {
94 walk->offset = ALIGN(walk->offset, alignmask + 1); 94 walk->offset = ALIGN(walk->offset, alignmask + 1);
95 walk->data += walk->offset;
96
97 nbytes = min(nbytes, 95 nbytes = min(nbytes,
98 ((unsigned int)(PAGE_SIZE)) - walk->offset); 96 ((unsigned int)(PAGE_SIZE)) - walk->offset);
99 walk->entrylen -= nbytes; 97 walk->entrylen -= nbytes;
100 98
101 return nbytes; 99 if (nbytes) {
100 walk->data += walk->offset;
101 return nbytes;
102 }
102 } 103 }
103 104
104 if (walk->flags & CRYPTO_ALG_ASYNC) 105 if (walk->flags & CRYPTO_ALG_ASYNC)
@@ -446,24 +447,12 @@ static int ahash_def_finup(struct ahash_request *req)
446 return ahash_def_finup_finish1(req, err); 447 return ahash_def_finup_finish1(req, err);
447} 448}
448 449
449static int ahash_no_export(struct ahash_request *req, void *out)
450{
451 return -ENOSYS;
452}
453
454static int ahash_no_import(struct ahash_request *req, const void *in)
455{
456 return -ENOSYS;
457}
458
459static int crypto_ahash_init_tfm(struct crypto_tfm *tfm) 450static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
460{ 451{
461 struct crypto_ahash *hash = __crypto_ahash_cast(tfm); 452 struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
462 struct ahash_alg *alg = crypto_ahash_alg(hash); 453 struct ahash_alg *alg = crypto_ahash_alg(hash);
463 454
464 hash->setkey = ahash_nosetkey; 455 hash->setkey = ahash_nosetkey;
465 hash->export = ahash_no_export;
466 hash->import = ahash_no_import;
467 456
468 if (tfm->__crt_alg->cra_type != &crypto_ahash_type) 457 if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
469 return crypto_init_shash_ops_async(tfm); 458 return crypto_init_shash_ops_async(tfm);
@@ -473,16 +462,14 @@ static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
473 hash->final = alg->final; 462 hash->final = alg->final;
474 hash->finup = alg->finup ?: ahash_def_finup; 463 hash->finup = alg->finup ?: ahash_def_finup;
475 hash->digest = alg->digest; 464 hash->digest = alg->digest;
465 hash->export = alg->export;
466 hash->import = alg->import;
476 467
477 if (alg->setkey) { 468 if (alg->setkey) {
478 hash->setkey = alg->setkey; 469 hash->setkey = alg->setkey;
479 if (!(alg->halg.base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY)) 470 if (!(alg->halg.base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
480 crypto_ahash_set_flags(hash, CRYPTO_TFM_NEED_KEY); 471 crypto_ahash_set_flags(hash, CRYPTO_TFM_NEED_KEY);
481 } 472 }
482 if (alg->export)
483 hash->export = alg->export;
484 if (alg->import)
485 hash->import = alg->import;
486 473
487 return 0; 474 return 0;
488} 475}
diff --git a/crypto/algapi.c b/crypto/algapi.c
index 395b082d03a9..2a0271b5f62a 100644
--- a/crypto/algapi.c
+++ b/crypto/algapi.c
@@ -543,9 +543,6 @@ int crypto_register_instance(struct crypto_template *tmpl,
543 inst->alg.cra_module = tmpl->module; 543 inst->alg.cra_module = tmpl->module;
544 inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE; 544 inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;
545 545
546 if (unlikely(!crypto_mod_get(&inst->alg)))
547 return -EAGAIN;
548
549 down_write(&crypto_alg_sem); 546 down_write(&crypto_alg_sem);
550 547
551 larval = __crypto_register_alg(&inst->alg); 548 larval = __crypto_register_alg(&inst->alg);
@@ -563,14 +560,9 @@ unlock:
563 goto err; 560 goto err;
564 561
565 crypto_wait_for_test(larval); 562 crypto_wait_for_test(larval);
566
567 /* Remove instance if test failed */
568 if (!(inst->alg.cra_flags & CRYPTO_ALG_TESTED))
569 crypto_unregister_instance(inst);
570 err = 0; 563 err = 0;
571 564
572err: 565err:
573 crypto_mod_put(&inst->alg);
574 return err; 566 return err;
575} 567}
576EXPORT_SYMBOL_GPL(crypto_register_instance); 568EXPORT_SYMBOL_GPL(crypto_register_instance);
diff --git a/crypto/api.c b/crypto/api.c
index 70a894e52ff3..1d5290c67108 100644
--- a/crypto/api.c
+++ b/crypto/api.c
@@ -193,17 +193,24 @@ static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
193 return alg; 193 return alg;
194} 194}
195 195
196struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask) 196static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
197 u32 mask)
197{ 198{
198 struct crypto_alg *alg; 199 struct crypto_alg *alg;
200 u32 test = 0;
201
202 if (!((type | mask) & CRYPTO_ALG_TESTED))
203 test |= CRYPTO_ALG_TESTED;
199 204
200 down_read(&crypto_alg_sem); 205 down_read(&crypto_alg_sem);
201 alg = __crypto_alg_lookup(name, type, mask); 206 alg = __crypto_alg_lookup(name, type | test, mask | test);
207 if (!alg && test)
208 alg = __crypto_alg_lookup(name, type, mask) ?
209 ERR_PTR(-ELIBBAD) : NULL;
202 up_read(&crypto_alg_sem); 210 up_read(&crypto_alg_sem);
203 211
204 return alg; 212 return alg;
205} 213}
206EXPORT_SYMBOL_GPL(crypto_alg_lookup);
207 214
208static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, 215static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
209 u32 mask) 216 u32 mask)
@@ -227,10 +234,12 @@ static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
227 alg = crypto_alg_lookup(name, type, mask); 234 alg = crypto_alg_lookup(name, type, mask);
228 } 235 }
229 236
230 if (alg) 237 if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
231 return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg; 238 alg = crypto_larval_wait(alg);
239 else if (!alg)
240 alg = crypto_larval_add(name, type, mask);
232 241
233 return crypto_larval_add(name, type, mask); 242 return alg;
234} 243}
235 244
236int crypto_probing_notify(unsigned long val, void *v) 245int crypto_probing_notify(unsigned long val, void *v)
@@ -253,11 +262,6 @@ struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
253 struct crypto_alg *larval; 262 struct crypto_alg *larval;
254 int ok; 263 int ok;
255 264
256 if (!((type | mask) & CRYPTO_ALG_TESTED)) {
257 type |= CRYPTO_ALG_TESTED;
258 mask |= CRYPTO_ALG_TESTED;
259 }
260
261 /* 265 /*
262 * If the internal flag is set for a cipher, require a caller to 266 * If the internal flag is set for a cipher, require a caller to
263 * to invoke the cipher with the internal flag to use that cipher. 267 * to invoke the cipher with the internal flag to use that cipher.
@@ -485,20 +489,14 @@ struct crypto_alg *crypto_find_alg(const char *alg_name,
485 const struct crypto_type *frontend, 489 const struct crypto_type *frontend,
486 u32 type, u32 mask) 490 u32 type, u32 mask)
487{ 491{
488 struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
489 crypto_alg_mod_lookup;
490
491 if (frontend) { 492 if (frontend) {
492 type &= frontend->maskclear; 493 type &= frontend->maskclear;
493 mask &= frontend->maskclear; 494 mask &= frontend->maskclear;
494 type |= frontend->type; 495 type |= frontend->type;
495 mask |= frontend->maskset; 496 mask |= frontend->maskset;
496
497 if (frontend->lookup)
498 lookup = frontend->lookup;
499 } 497 }
500 498
501 return lookup(alg_name, type, mask); 499 return crypto_alg_mod_lookup(alg_name, type, mask);
502} 500}
503EXPORT_SYMBOL_GPL(crypto_find_alg); 501EXPORT_SYMBOL_GPL(crypto_find_alg);
504 502
diff --git a/crypto/cfb.c b/crypto/cfb.c
new file mode 100644
index 000000000000..94ee39bed758
--- /dev/null
+++ b/crypto/cfb.c
@@ -0,0 +1,353 @@
1//SPDX-License-Identifier: GPL-2.0
2/*
3 * CFB: Cipher FeedBack mode
4 *
5 * Copyright (c) 2018 James.Bottomley@HansenPartnership.com
6 *
7 * CFB is a stream cipher mode which is layered on to a block
8 * encryption scheme. It works very much like a one time pad where
9 * the pad is generated initially from the encrypted IV and then
10 * subsequently from the encrypted previous block of ciphertext. The
11 * pad is XOR'd into the plain text to get the final ciphertext.
12 *
13 * The scheme of CFB is best described by wikipedia:
14 *
15 * https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#CFB
16 *
17 * Note that since the pad for both encryption and decryption is
18 * generated by an encryption operation, CFB never uses the block
19 * decryption function.
20 */
21
22#include <crypto/algapi.h>
23#include <crypto/internal/skcipher.h>
24#include <linux/err.h>
25#include <linux/init.h>
26#include <linux/kernel.h>
27#include <linux/module.h>
28#include <linux/slab.h>
29#include <linux/string.h>
30#include <linux/types.h>
31
32struct crypto_cfb_ctx {
33 struct crypto_cipher *child;
34};
35
36static unsigned int crypto_cfb_bsize(struct crypto_skcipher *tfm)
37{
38 struct crypto_cfb_ctx *ctx = crypto_skcipher_ctx(tfm);
39 struct crypto_cipher *child = ctx->child;
40
41 return crypto_cipher_blocksize(child);
42}
43
44static void crypto_cfb_encrypt_one(struct crypto_skcipher *tfm,
45 const u8 *src, u8 *dst)
46{
47 struct crypto_cfb_ctx *ctx = crypto_skcipher_ctx(tfm);
48
49 crypto_cipher_encrypt_one(ctx->child, dst, src);
50}
51
52/* final encrypt and decrypt is the same */
53static void crypto_cfb_final(struct skcipher_walk *walk,
54 struct crypto_skcipher *tfm)
55{
56 const unsigned int bsize = crypto_cfb_bsize(tfm);
57 const unsigned long alignmask = crypto_skcipher_alignmask(tfm);
58 u8 tmp[bsize + alignmask];
59 u8 *stream = PTR_ALIGN(tmp + 0, alignmask + 1);
60 u8 *src = walk->src.virt.addr;
61 u8 *dst = walk->dst.virt.addr;
62 u8 *iv = walk->iv;
63 unsigned int nbytes = walk->nbytes;
64
65 crypto_cfb_encrypt_one(tfm, iv, stream);
66 crypto_xor_cpy(dst, stream, src, nbytes);
67}
68
69static int crypto_cfb_encrypt_segment(struct skcipher_walk *walk,
70 struct crypto_skcipher *tfm)
71{
72 const unsigned int bsize = crypto_cfb_bsize(tfm);
73 unsigned int nbytes = walk->nbytes;
74 u8 *src = walk->src.virt.addr;
75 u8 *dst = walk->dst.virt.addr;
76 u8 *iv = walk->iv;
77
78 do {
79 crypto_cfb_encrypt_one(tfm, iv, dst);
80 crypto_xor(dst, src, bsize);
81 memcpy(iv, dst, bsize);
82
83 src += bsize;
84 dst += bsize;
85 } while ((nbytes -= bsize) >= bsize);
86
87 return nbytes;
88}
89
90static int crypto_cfb_encrypt_inplace(struct skcipher_walk *walk,
91 struct crypto_skcipher *tfm)
92{
93 const unsigned int bsize = crypto_cfb_bsize(tfm);
94 unsigned int nbytes = walk->nbytes;
95 u8 *src = walk->src.virt.addr;
96 u8 *iv = walk->iv;
97 u8 tmp[bsize];
98
99 do {
100 crypto_cfb_encrypt_one(tfm, iv, tmp);
101 crypto_xor(src, tmp, bsize);
102 iv = src;
103
104 src += bsize;
105 } while ((nbytes -= bsize) >= bsize);
106
107 memcpy(walk->iv, iv, bsize);
108
109 return nbytes;
110}
111
112static int crypto_cfb_encrypt(struct skcipher_request *req)
113{
114 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
115 struct skcipher_walk walk;
116 unsigned int bsize = crypto_cfb_bsize(tfm);
117 int err;
118
119 err = skcipher_walk_virt(&walk, req, false);
120
121 while (walk.nbytes >= bsize) {
122 if (walk.src.virt.addr == walk.dst.virt.addr)
123 err = crypto_cfb_encrypt_inplace(&walk, tfm);
124 else
125 err = crypto_cfb_encrypt_segment(&walk, tfm);
126 err = skcipher_walk_done(&walk, err);
127 }
128
129 if (walk.nbytes) {
130 crypto_cfb_final(&walk, tfm);
131 err = skcipher_walk_done(&walk, 0);
132 }
133
134 return err;
135}
136
137static int crypto_cfb_decrypt_segment(struct skcipher_walk *walk,
138 struct crypto_skcipher *tfm)
139{
140 const unsigned int bsize = crypto_cfb_bsize(tfm);
141 unsigned int nbytes = walk->nbytes;
142 u8 *src = walk->src.virt.addr;
143 u8 *dst = walk->dst.virt.addr;
144 u8 *iv = walk->iv;
145
146 do {
147 crypto_cfb_encrypt_one(tfm, iv, dst);
148 crypto_xor(dst, iv, bsize);
149 iv = src;
150
151 src += bsize;
152 dst += bsize;
153 } while ((nbytes -= bsize) >= bsize);
154
155 memcpy(walk->iv, iv, bsize);
156
157 return nbytes;
158}
159
160static int crypto_cfb_decrypt_inplace(struct skcipher_walk *walk,
161 struct crypto_skcipher *tfm)
162{
163 const unsigned int bsize = crypto_cfb_bsize(tfm);
164 unsigned int nbytes = walk->nbytes;
165 u8 *src = walk->src.virt.addr;
166 u8 *iv = walk->iv;
167 u8 tmp[bsize];
168
169 do {
170 crypto_cfb_encrypt_one(tfm, iv, tmp);
171 memcpy(iv, src, bsize);
172 crypto_xor(src, tmp, bsize);
173 src += bsize;
174 } while ((nbytes -= bsize) >= bsize);
175
176 memcpy(walk->iv, iv, bsize);
177
178 return nbytes;
179}
180
181static int crypto_cfb_decrypt_blocks(struct skcipher_walk *walk,
182 struct crypto_skcipher *tfm)
183{
184 if (walk->src.virt.addr == walk->dst.virt.addr)
185 return crypto_cfb_decrypt_inplace(walk, tfm);
186 else
187 return crypto_cfb_decrypt_segment(walk, tfm);
188}
189
190static int crypto_cfb_setkey(struct crypto_skcipher *parent, const u8 *key,
191 unsigned int keylen)
192{
193 struct crypto_cfb_ctx *ctx = crypto_skcipher_ctx(parent);
194 struct crypto_cipher *child = ctx->child;
195 int err;
196
197 crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
198 crypto_cipher_set_flags(child, crypto_skcipher_get_flags(parent) &
199 CRYPTO_TFM_REQ_MASK);
200 err = crypto_cipher_setkey(child, key, keylen);
201 crypto_skcipher_set_flags(parent, crypto_cipher_get_flags(child) &
202 CRYPTO_TFM_RES_MASK);
203 return err;
204}
205
206static int crypto_cfb_decrypt(struct skcipher_request *req)
207{
208 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
209 struct skcipher_walk walk;
210 const unsigned int bsize = crypto_cfb_bsize(tfm);
211 int err;
212
213 err = skcipher_walk_virt(&walk, req, false);
214
215 while (walk.nbytes >= bsize) {
216 err = crypto_cfb_decrypt_blocks(&walk, tfm);
217 err = skcipher_walk_done(&walk, err);
218 }
219
220 if (walk.nbytes) {
221 crypto_cfb_final(&walk, tfm);
222 err = skcipher_walk_done(&walk, 0);
223 }
224
225 return err;
226}
227
228static int crypto_cfb_init_tfm(struct crypto_skcipher *tfm)
229{
230 struct skcipher_instance *inst = skcipher_alg_instance(tfm);
231 struct crypto_spawn *spawn = skcipher_instance_ctx(inst);
232 struct crypto_cfb_ctx *ctx = crypto_skcipher_ctx(tfm);
233 struct crypto_cipher *cipher;
234
235 cipher = crypto_spawn_cipher(spawn);
236 if (IS_ERR(cipher))
237 return PTR_ERR(cipher);
238
239 ctx->child = cipher;
240 return 0;
241}
242
243static void crypto_cfb_exit_tfm(struct crypto_skcipher *tfm)
244{
245 struct crypto_cfb_ctx *ctx = crypto_skcipher_ctx(tfm);
246
247 crypto_free_cipher(ctx->child);
248}
249
250static void crypto_cfb_free(struct skcipher_instance *inst)
251{
252 crypto_drop_skcipher(skcipher_instance_ctx(inst));
253 kfree(inst);
254}
255
256static int crypto_cfb_create(struct crypto_template *tmpl, struct rtattr **tb)
257{
258 struct skcipher_instance *inst;
259 struct crypto_attr_type *algt;
260 struct crypto_spawn *spawn;
261 struct crypto_alg *alg;
262 u32 mask;
263 int err;
264
265 err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER);
266 if (err)
267 return err;
268
269 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
270 if (!inst)
271 return -ENOMEM;
272
273 algt = crypto_get_attr_type(tb);
274 err = PTR_ERR(algt);
275 if (IS_ERR(algt))
276 goto err_free_inst;
277
278 mask = CRYPTO_ALG_TYPE_MASK |
279 crypto_requires_off(algt->type, algt->mask,
280 CRYPTO_ALG_NEED_FALLBACK);
281
282 alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER, mask);
283 err = PTR_ERR(alg);
284 if (IS_ERR(alg))
285 goto err_free_inst;
286
287 spawn = skcipher_instance_ctx(inst);
288 err = crypto_init_spawn(spawn, alg, skcipher_crypto_instance(inst),
289 CRYPTO_ALG_TYPE_MASK);
290 crypto_mod_put(alg);
291 if (err)
292 goto err_free_inst;
293
294 err = crypto_inst_setname(skcipher_crypto_instance(inst), "cfb", alg);
295 if (err)
296 goto err_drop_spawn;
297
298 inst->alg.base.cra_priority = alg->cra_priority;
299 /* we're a stream cipher independend of the crypto cra_blocksize */
300 inst->alg.base.cra_blocksize = 1;
301 inst->alg.base.cra_alignmask = alg->cra_alignmask;
302
303 inst->alg.ivsize = alg->cra_blocksize;
304 inst->alg.min_keysize = alg->cra_cipher.cia_min_keysize;
305 inst->alg.max_keysize = alg->cra_cipher.cia_max_keysize;
306
307 inst->alg.base.cra_ctxsize = sizeof(struct crypto_cfb_ctx);
308
309 inst->alg.init = crypto_cfb_init_tfm;
310 inst->alg.exit = crypto_cfb_exit_tfm;
311
312 inst->alg.setkey = crypto_cfb_setkey;
313 inst->alg.encrypt = crypto_cfb_encrypt;
314 inst->alg.decrypt = crypto_cfb_decrypt;
315
316 inst->free = crypto_cfb_free;
317
318 err = skcipher_register_instance(tmpl, inst);
319 if (err)
320 goto err_drop_spawn;
321
322out:
323 return err;
324
325err_drop_spawn:
326 crypto_drop_spawn(spawn);
327err_free_inst:
328 kfree(inst);
329 goto out;
330}
331
332static struct crypto_template crypto_cfb_tmpl = {
333 .name = "cfb",
334 .create = crypto_cfb_create,
335 .module = THIS_MODULE,
336};
337
338static int __init crypto_cfb_module_init(void)
339{
340 return crypto_register_template(&crypto_cfb_tmpl);
341}
342
343static void __exit crypto_cfb_module_exit(void)
344{
345 crypto_unregister_template(&crypto_cfb_tmpl);
346}
347
348module_init(crypto_cfb_module_init);
349module_exit(crypto_cfb_module_exit);
350
351MODULE_LICENSE("GPL");
352MODULE_DESCRIPTION("CFB block cipher algorithm");
353MODULE_ALIAS_CRYPTO("cfb");
diff --git a/crypto/crypto_engine.c b/crypto/crypto_engine.c
index 61e7c4e02fd2..992e8d8dcdd9 100644
--- a/crypto/crypto_engine.c
+++ b/crypto/crypto_engine.c
@@ -15,13 +15,50 @@
15#include <linux/err.h> 15#include <linux/err.h>
16#include <linux/delay.h> 16#include <linux/delay.h>
17#include <crypto/engine.h> 17#include <crypto/engine.h>
18#include <crypto/internal/hash.h>
19#include <uapi/linux/sched/types.h> 18#include <uapi/linux/sched/types.h>
20#include "internal.h" 19#include "internal.h"
21 20
22#define CRYPTO_ENGINE_MAX_QLEN 10 21#define CRYPTO_ENGINE_MAX_QLEN 10
23 22
24/** 23/**
24 * crypto_finalize_request - finalize one request if the request is done
25 * @engine: the hardware engine
26 * @req: the request need to be finalized
27 * @err: error number
28 */
29static void crypto_finalize_request(struct crypto_engine *engine,
30 struct crypto_async_request *req, int err)
31{
32 unsigned long flags;
33 bool finalize_cur_req = false;
34 int ret;
35 struct crypto_engine_ctx *enginectx;
36
37 spin_lock_irqsave(&engine->queue_lock, flags);
38 if (engine->cur_req == req)
39 finalize_cur_req = true;
40 spin_unlock_irqrestore(&engine->queue_lock, flags);
41
42 if (finalize_cur_req) {
43 enginectx = crypto_tfm_ctx(req->tfm);
44 if (engine->cur_req_prepared &&
45 enginectx->op.unprepare_request) {
46 ret = enginectx->op.unprepare_request(engine, req);
47 if (ret)
48 dev_err(engine->dev, "failed to unprepare request\n");
49 }
50 spin_lock_irqsave(&engine->queue_lock, flags);
51 engine->cur_req = NULL;
52 engine->cur_req_prepared = false;
53 spin_unlock_irqrestore(&engine->queue_lock, flags);
54 }
55
56 req->complete(req, err);
57
58 kthread_queue_work(engine->kworker, &engine->pump_requests);
59}
60
61/**
25 * crypto_pump_requests - dequeue one request from engine queue to process 62 * crypto_pump_requests - dequeue one request from engine queue to process
26 * @engine: the hardware engine 63 * @engine: the hardware engine
27 * @in_kthread: true if we are in the context of the request pump thread 64 * @in_kthread: true if we are in the context of the request pump thread
@@ -34,11 +71,10 @@ static void crypto_pump_requests(struct crypto_engine *engine,
34 bool in_kthread) 71 bool in_kthread)
35{ 72{
36 struct crypto_async_request *async_req, *backlog; 73 struct crypto_async_request *async_req, *backlog;
37 struct ahash_request *hreq;
38 struct ablkcipher_request *breq;
39 unsigned long flags; 74 unsigned long flags;
40 bool was_busy = false; 75 bool was_busy = false;
41 int ret, rtype; 76 int ret;
77 struct crypto_engine_ctx *enginectx;
42 78
43 spin_lock_irqsave(&engine->queue_lock, flags); 79 spin_lock_irqsave(&engine->queue_lock, flags);
44 80
@@ -94,7 +130,6 @@ static void crypto_pump_requests(struct crypto_engine *engine,
94 130
95 spin_unlock_irqrestore(&engine->queue_lock, flags); 131 spin_unlock_irqrestore(&engine->queue_lock, flags);
96 132
97 rtype = crypto_tfm_alg_type(engine->cur_req->tfm);
98 /* Until here we get the request need to be encrypted successfully */ 133 /* Until here we get the request need to be encrypted successfully */
99 if (!was_busy && engine->prepare_crypt_hardware) { 134 if (!was_busy && engine->prepare_crypt_hardware) {
100 ret = engine->prepare_crypt_hardware(engine); 135 ret = engine->prepare_crypt_hardware(engine);
@@ -104,57 +139,31 @@ static void crypto_pump_requests(struct crypto_engine *engine,
104 } 139 }
105 } 140 }
106 141
107 switch (rtype) { 142 enginectx = crypto_tfm_ctx(async_req->tfm);
108 case CRYPTO_ALG_TYPE_AHASH: 143
109 hreq = ahash_request_cast(engine->cur_req); 144 if (enginectx->op.prepare_request) {
110 if (engine->prepare_hash_request) { 145 ret = enginectx->op.prepare_request(engine, async_req);
111 ret = engine->prepare_hash_request(engine, hreq);
112 if (ret) {
113 dev_err(engine->dev, "failed to prepare request: %d\n",
114 ret);
115 goto req_err;
116 }
117 engine->cur_req_prepared = true;
118 }
119 ret = engine->hash_one_request(engine, hreq);
120 if (ret) {
121 dev_err(engine->dev, "failed to hash one request from queue\n");
122 goto req_err;
123 }
124 return;
125 case CRYPTO_ALG_TYPE_ABLKCIPHER:
126 breq = ablkcipher_request_cast(engine->cur_req);
127 if (engine->prepare_cipher_request) {
128 ret = engine->prepare_cipher_request(engine, breq);
129 if (ret) {
130 dev_err(engine->dev, "failed to prepare request: %d\n",
131 ret);
132 goto req_err;
133 }
134 engine->cur_req_prepared = true;
135 }
136 ret = engine->cipher_one_request(engine, breq);
137 if (ret) { 146 if (ret) {
138 dev_err(engine->dev, "failed to cipher one request from queue\n"); 147 dev_err(engine->dev, "failed to prepare request: %d\n",
148 ret);
139 goto req_err; 149 goto req_err;
140 } 150 }
141 return; 151 engine->cur_req_prepared = true;
142 default: 152 }
143 dev_err(engine->dev, "failed to prepare request of unknown type\n"); 153 if (!enginectx->op.do_one_request) {
144 return; 154 dev_err(engine->dev, "failed to do request\n");
155 ret = -EINVAL;
156 goto req_err;
145 } 157 }
158 ret = enginectx->op.do_one_request(engine, async_req);
159 if (ret) {
160 dev_err(engine->dev, "Failed to do one request from queue: %d\n", ret);
161 goto req_err;
162 }
163 return;
146 164
147req_err: 165req_err:
148 switch (rtype) { 166 crypto_finalize_request(engine, async_req, ret);
149 case CRYPTO_ALG_TYPE_AHASH:
150 hreq = ahash_request_cast(engine->cur_req);
151 crypto_finalize_hash_request(engine, hreq, ret);
152 break;
153 case CRYPTO_ALG_TYPE_ABLKCIPHER:
154 breq = ablkcipher_request_cast(engine->cur_req);
155 crypto_finalize_cipher_request(engine, breq, ret);
156 break;
157 }
158 return; 167 return;
159 168
160out: 169out:
@@ -170,13 +179,12 @@ static void crypto_pump_work(struct kthread_work *work)
170} 179}
171 180
172/** 181/**
173 * crypto_transfer_cipher_request - transfer the new request into the 182 * crypto_transfer_request - transfer the new request into the engine queue
174 * enginequeue
175 * @engine: the hardware engine 183 * @engine: the hardware engine
176 * @req: the request need to be listed into the engine queue 184 * @req: the request need to be listed into the engine queue
177 */ 185 */
178int crypto_transfer_cipher_request(struct crypto_engine *engine, 186static int crypto_transfer_request(struct crypto_engine *engine,
179 struct ablkcipher_request *req, 187 struct crypto_async_request *req,
180 bool need_pump) 188 bool need_pump)
181{ 189{
182 unsigned long flags; 190 unsigned long flags;
@@ -189,7 +197,7 @@ int crypto_transfer_cipher_request(struct crypto_engine *engine,
189 return -ESHUTDOWN; 197 return -ESHUTDOWN;
190 } 198 }
191 199
192 ret = ablkcipher_enqueue_request(&engine->queue, req); 200 ret = crypto_enqueue_request(&engine->queue, req);
193 201
194 if (!engine->busy && need_pump) 202 if (!engine->busy && need_pump)
195 kthread_queue_work(engine->kworker, &engine->pump_requests); 203 kthread_queue_work(engine->kworker, &engine->pump_requests);
@@ -197,102 +205,131 @@ int crypto_transfer_cipher_request(struct crypto_engine *engine,
197 spin_unlock_irqrestore(&engine->queue_lock, flags); 205 spin_unlock_irqrestore(&engine->queue_lock, flags);
198 return ret; 206 return ret;
199} 207}
200EXPORT_SYMBOL_GPL(crypto_transfer_cipher_request);
201 208
202/** 209/**
203 * crypto_transfer_cipher_request_to_engine - transfer one request to list 210 * crypto_transfer_request_to_engine - transfer one request to list
204 * into the engine queue 211 * into the engine queue
205 * @engine: the hardware engine 212 * @engine: the hardware engine
206 * @req: the request need to be listed into the engine queue 213 * @req: the request need to be listed into the engine queue
207 */ 214 */
208int crypto_transfer_cipher_request_to_engine(struct crypto_engine *engine, 215static int crypto_transfer_request_to_engine(struct crypto_engine *engine,
209 struct ablkcipher_request *req) 216 struct crypto_async_request *req)
210{ 217{
211 return crypto_transfer_cipher_request(engine, req, true); 218 return crypto_transfer_request(engine, req, true);
212} 219}
213EXPORT_SYMBOL_GPL(crypto_transfer_cipher_request_to_engine);
214 220
215/** 221/**
216 * crypto_transfer_hash_request - transfer the new request into the 222 * crypto_transfer_ablkcipher_request_to_engine - transfer one ablkcipher_request
217 * enginequeue 223 * to list into the engine queue
218 * @engine: the hardware engine 224 * @engine: the hardware engine
219 * @req: the request need to be listed into the engine queue 225 * @req: the request need to be listed into the engine queue
226 * TODO: Remove this function when skcipher conversion is finished
220 */ 227 */
221int crypto_transfer_hash_request(struct crypto_engine *engine, 228int crypto_transfer_ablkcipher_request_to_engine(struct crypto_engine *engine,
222 struct ahash_request *req, bool need_pump) 229 struct ablkcipher_request *req)
223{ 230{
224 unsigned long flags; 231 return crypto_transfer_request_to_engine(engine, &req->base);
225 int ret; 232}
226 233EXPORT_SYMBOL_GPL(crypto_transfer_ablkcipher_request_to_engine);
227 spin_lock_irqsave(&engine->queue_lock, flags);
228
229 if (!engine->running) {
230 spin_unlock_irqrestore(&engine->queue_lock, flags);
231 return -ESHUTDOWN;
232 }
233
234 ret = ahash_enqueue_request(&engine->queue, req);
235 234
236 if (!engine->busy && need_pump) 235/**
237 kthread_queue_work(engine->kworker, &engine->pump_requests); 236 * crypto_transfer_aead_request_to_engine - transfer one aead_request
237 * to list into the engine queue
238 * @engine: the hardware engine
239 * @req: the request need to be listed into the engine queue
240 */
241int crypto_transfer_aead_request_to_engine(struct crypto_engine *engine,
242 struct aead_request *req)
243{
244 return crypto_transfer_request_to_engine(engine, &req->base);
245}
246EXPORT_SYMBOL_GPL(crypto_transfer_aead_request_to_engine);
238 247
239 spin_unlock_irqrestore(&engine->queue_lock, flags); 248/**
240 return ret; 249 * crypto_transfer_akcipher_request_to_engine - transfer one akcipher_request
250 * to list into the engine queue
251 * @engine: the hardware engine
252 * @req: the request need to be listed into the engine queue
253 */
254int crypto_transfer_akcipher_request_to_engine(struct crypto_engine *engine,
255 struct akcipher_request *req)
256{
257 return crypto_transfer_request_to_engine(engine, &req->base);
241} 258}
242EXPORT_SYMBOL_GPL(crypto_transfer_hash_request); 259EXPORT_SYMBOL_GPL(crypto_transfer_akcipher_request_to_engine);
243 260
244/** 261/**
245 * crypto_transfer_hash_request_to_engine - transfer one request to list 262 * crypto_transfer_hash_request_to_engine - transfer one ahash_request
246 * into the engine queue 263 * to list into the engine queue
247 * @engine: the hardware engine 264 * @engine: the hardware engine
248 * @req: the request need to be listed into the engine queue 265 * @req: the request need to be listed into the engine queue
249 */ 266 */
250int crypto_transfer_hash_request_to_engine(struct crypto_engine *engine, 267int crypto_transfer_hash_request_to_engine(struct crypto_engine *engine,
251 struct ahash_request *req) 268 struct ahash_request *req)
252{ 269{
253 return crypto_transfer_hash_request(engine, req, true); 270 return crypto_transfer_request_to_engine(engine, &req->base);
254} 271}
255EXPORT_SYMBOL_GPL(crypto_transfer_hash_request_to_engine); 272EXPORT_SYMBOL_GPL(crypto_transfer_hash_request_to_engine);
256 273
257/** 274/**
258 * crypto_finalize_cipher_request - finalize one request if the request is done 275 * crypto_transfer_skcipher_request_to_engine - transfer one skcipher_request
276 * to list into the engine queue
277 * @engine: the hardware engine
278 * @req: the request need to be listed into the engine queue
279 */
280int crypto_transfer_skcipher_request_to_engine(struct crypto_engine *engine,
281 struct skcipher_request *req)
282{
283 return crypto_transfer_request_to_engine(engine, &req->base);
284}
285EXPORT_SYMBOL_GPL(crypto_transfer_skcipher_request_to_engine);
286
287/**
288 * crypto_finalize_ablkcipher_request - finalize one ablkcipher_request if
289 * the request is done
259 * @engine: the hardware engine 290 * @engine: the hardware engine
260 * @req: the request need to be finalized 291 * @req: the request need to be finalized
261 * @err: error number 292 * @err: error number
293 * TODO: Remove this function when skcipher conversion is finished
262 */ 294 */
263void crypto_finalize_cipher_request(struct crypto_engine *engine, 295void crypto_finalize_ablkcipher_request(struct crypto_engine *engine,
264 struct ablkcipher_request *req, int err) 296 struct ablkcipher_request *req, int err)
265{ 297{
266 unsigned long flags; 298 return crypto_finalize_request(engine, &req->base, err);
267 bool finalize_cur_req = false; 299}
268 int ret; 300EXPORT_SYMBOL_GPL(crypto_finalize_ablkcipher_request);
269
270 spin_lock_irqsave(&engine->queue_lock, flags);
271 if (engine->cur_req == &req->base)
272 finalize_cur_req = true;
273 spin_unlock_irqrestore(&engine->queue_lock, flags);
274
275 if (finalize_cur_req) {
276 if (engine->cur_req_prepared &&
277 engine->unprepare_cipher_request) {
278 ret = engine->unprepare_cipher_request(engine, req);
279 if (ret)
280 dev_err(engine->dev, "failed to unprepare request\n");
281 }
282 spin_lock_irqsave(&engine->queue_lock, flags);
283 engine->cur_req = NULL;
284 engine->cur_req_prepared = false;
285 spin_unlock_irqrestore(&engine->queue_lock, flags);
286 }
287 301
288 req->base.complete(&req->base, err); 302/**
303 * crypto_finalize_aead_request - finalize one aead_request if
304 * the request is done
305 * @engine: the hardware engine
306 * @req: the request need to be finalized
307 * @err: error number
308 */
309void crypto_finalize_aead_request(struct crypto_engine *engine,
310 struct aead_request *req, int err)
311{
312 return crypto_finalize_request(engine, &req->base, err);
313}
314EXPORT_SYMBOL_GPL(crypto_finalize_aead_request);
289 315
290 kthread_queue_work(engine->kworker, &engine->pump_requests); 316/**
317 * crypto_finalize_akcipher_request - finalize one akcipher_request if
318 * the request is done
319 * @engine: the hardware engine
320 * @req: the request need to be finalized
321 * @err: error number
322 */
323void crypto_finalize_akcipher_request(struct crypto_engine *engine,
324 struct akcipher_request *req, int err)
325{
326 return crypto_finalize_request(engine, &req->base, err);
291} 327}
292EXPORT_SYMBOL_GPL(crypto_finalize_cipher_request); 328EXPORT_SYMBOL_GPL(crypto_finalize_akcipher_request);
293 329
294/** 330/**
295 * crypto_finalize_hash_request - finalize one request if the request is done 331 * crypto_finalize_hash_request - finalize one ahash_request if
332 * the request is done
296 * @engine: the hardware engine 333 * @engine: the hardware engine
297 * @req: the request need to be finalized 334 * @req: the request need to be finalized
298 * @err: error number 335 * @err: error number
@@ -300,35 +337,25 @@ EXPORT_SYMBOL_GPL(crypto_finalize_cipher_request);
300void crypto_finalize_hash_request(struct crypto_engine *engine, 337void crypto_finalize_hash_request(struct crypto_engine *engine,
301 struct ahash_request *req, int err) 338 struct ahash_request *req, int err)
302{ 339{
303 unsigned long flags; 340 return crypto_finalize_request(engine, &req->base, err);
304 bool finalize_cur_req = false;
305 int ret;
306
307 spin_lock_irqsave(&engine->queue_lock, flags);
308 if (engine->cur_req == &req->base)
309 finalize_cur_req = true;
310 spin_unlock_irqrestore(&engine->queue_lock, flags);
311
312 if (finalize_cur_req) {
313 if (engine->cur_req_prepared &&
314 engine->unprepare_hash_request) {
315 ret = engine->unprepare_hash_request(engine, req);
316 if (ret)
317 dev_err(engine->dev, "failed to unprepare request\n");
318 }
319 spin_lock_irqsave(&engine->queue_lock, flags);
320 engine->cur_req = NULL;
321 engine->cur_req_prepared = false;
322 spin_unlock_irqrestore(&engine->queue_lock, flags);
323 }
324
325 req->base.complete(&req->base, err);
326
327 kthread_queue_work(engine->kworker, &engine->pump_requests);
328} 341}
329EXPORT_SYMBOL_GPL(crypto_finalize_hash_request); 342EXPORT_SYMBOL_GPL(crypto_finalize_hash_request);
330 343
331/** 344/**
345 * crypto_finalize_skcipher_request - finalize one skcipher_request if
346 * the request is done
347 * @engine: the hardware engine
348 * @req: the request need to be finalized
349 * @err: error number
350 */
351void crypto_finalize_skcipher_request(struct crypto_engine *engine,
352 struct skcipher_request *req, int err)
353{
354 return crypto_finalize_request(engine, &req->base, err);
355}
356EXPORT_SYMBOL_GPL(crypto_finalize_skcipher_request);
357
358/**
332 * crypto_engine_start - start the hardware engine 359 * crypto_engine_start - start the hardware engine
333 * @engine: the hardware engine need to be started 360 * @engine: the hardware engine need to be started
334 * 361 *
diff --git a/crypto/crypto_user.c b/crypto/crypto_user.c
index 5c291eedaa70..0e89b5457cab 100644
--- a/crypto/crypto_user.c
+++ b/crypto/crypto_user.c
@@ -271,7 +271,7 @@ static int crypto_report(struct sk_buff *in_skb, struct nlmsghdr *in_nlh,
271 return -ENOENT; 271 return -ENOENT;
272 272
273 err = -ENOMEM; 273 err = -ENOMEM;
274 skb = nlmsg_new(NLMSG_DEFAULT_SIZE, GFP_ATOMIC); 274 skb = nlmsg_new(NLMSG_DEFAULT_SIZE, GFP_KERNEL);
275 if (!skb) 275 if (!skb)
276 goto drop_alg; 276 goto drop_alg;
277 277
diff --git a/crypto/ecc.c b/crypto/ecc.c
index 18f32f2a5e1c..9c066b5ac12d 100644
--- a/crypto/ecc.c
+++ b/crypto/ecc.c
@@ -1025,9 +1025,7 @@ int crypto_ecdh_shared_secret(unsigned int curve_id, unsigned int ndigits,
1025{ 1025{
1026 int ret = 0; 1026 int ret = 0;
1027 struct ecc_point *product, *pk; 1027 struct ecc_point *product, *pk;
1028 u64 priv[ndigits]; 1028 u64 *priv, *rand_z;
1029 u64 rand_z[ndigits];
1030 unsigned int nbytes;
1031 const struct ecc_curve *curve = ecc_get_curve(curve_id); 1029 const struct ecc_curve *curve = ecc_get_curve(curve_id);
1032 1030
1033 if (!private_key || !public_key || !curve) { 1031 if (!private_key || !public_key || !curve) {
@@ -1035,14 +1033,22 @@ int crypto_ecdh_shared_secret(unsigned int curve_id, unsigned int ndigits,
1035 goto out; 1033 goto out;
1036 } 1034 }
1037 1035
1038 nbytes = ndigits << ECC_DIGITS_TO_BYTES_SHIFT; 1036 priv = kmalloc_array(ndigits, sizeof(*priv), GFP_KERNEL);
1037 if (!priv) {
1038 ret = -ENOMEM;
1039 goto out;
1040 }
1039 1041
1040 get_random_bytes(rand_z, nbytes); 1042 rand_z = kmalloc_array(ndigits, sizeof(*rand_z), GFP_KERNEL);
1043 if (!rand_z) {
1044 ret = -ENOMEM;
1045 goto kfree_out;
1046 }
1041 1047
1042 pk = ecc_alloc_point(ndigits); 1048 pk = ecc_alloc_point(ndigits);
1043 if (!pk) { 1049 if (!pk) {
1044 ret = -ENOMEM; 1050 ret = -ENOMEM;
1045 goto out; 1051 goto kfree_out;
1046 } 1052 }
1047 1053
1048 product = ecc_alloc_point(ndigits); 1054 product = ecc_alloc_point(ndigits);
@@ -1051,6 +1057,8 @@ int crypto_ecdh_shared_secret(unsigned int curve_id, unsigned int ndigits,
1051 goto err_alloc_product; 1057 goto err_alloc_product;
1052 } 1058 }
1053 1059
1060 get_random_bytes(rand_z, ndigits << ECC_DIGITS_TO_BYTES_SHIFT);
1061
1054 ecc_swap_digits(public_key, pk->x, ndigits); 1062 ecc_swap_digits(public_key, pk->x, ndigits);
1055 ecc_swap_digits(&public_key[ndigits], pk->y, ndigits); 1063 ecc_swap_digits(&public_key[ndigits], pk->y, ndigits);
1056 ecc_swap_digits(private_key, priv, ndigits); 1064 ecc_swap_digits(private_key, priv, ndigits);
@@ -1065,6 +1073,9 @@ int crypto_ecdh_shared_secret(unsigned int curve_id, unsigned int ndigits,
1065 ecc_free_point(product); 1073 ecc_free_point(product);
1066err_alloc_product: 1074err_alloc_product:
1067 ecc_free_point(pk); 1075 ecc_free_point(pk);
1076kfree_out:
1077 kzfree(priv);
1078 kzfree(rand_z);
1068out: 1079out:
1069 return ret; 1080 return ret;
1070} 1081}
diff --git a/crypto/ecdh.c b/crypto/ecdh.c
index 3aca0933ec44..d2ec33f0e098 100644
--- a/crypto/ecdh.c
+++ b/crypto/ecdh.c
@@ -89,12 +89,19 @@ static int ecdh_compute_value(struct kpp_request *req)
89 if (!shared_secret) 89 if (!shared_secret)
90 goto free_pubkey; 90 goto free_pubkey;
91 91
92 copied = sg_copy_to_buffer(req->src, 1, public_key, 92 /* from here on it's invalid parameters */
93 public_key_sz); 93 ret = -EINVAL;
94 if (copied != public_key_sz) { 94
95 ret = -EINVAL; 95 /* must have exactly two points to be on the curve */
96 if (public_key_sz != req->src_len)
97 goto free_all;
98
99 copied = sg_copy_to_buffer(req->src,
100 sg_nents_for_len(req->src,
101 public_key_sz),
102 public_key, public_key_sz);
103 if (copied != public_key_sz)
96 goto free_all; 104 goto free_all;
97 }
98 105
99 ret = crypto_ecdh_shared_secret(ctx->curve_id, ctx->ndigits, 106 ret = crypto_ecdh_shared_secret(ctx->curve_id, ctx->ndigits,
100 ctx->private_key, public_key, 107 ctx->private_key, public_key,
@@ -111,7 +118,11 @@ static int ecdh_compute_value(struct kpp_request *req)
111 if (ret < 0) 118 if (ret < 0)
112 goto free_all; 119 goto free_all;
113 120
114 copied = sg_copy_from_buffer(req->dst, 1, buf, nbytes); 121 /* might want less than we've got */
122 nbytes = min_t(size_t, nbytes, req->dst_len);
123 copied = sg_copy_from_buffer(req->dst, sg_nents_for_len(req->dst,
124 nbytes),
125 buf, nbytes);
115 if (copied != nbytes) 126 if (copied != nbytes)
116 ret = -EINVAL; 127 ret = -EINVAL;
117 128
diff --git a/crypto/internal.h b/crypto/internal.h
index 5ac27fba10e8..9a3f39939fba 100644
--- a/crypto/internal.h
+++ b/crypto/internal.h
@@ -67,7 +67,6 @@ static inline unsigned int crypto_compress_ctxsize(struct crypto_alg *alg)
67} 67}
68 68
69struct crypto_alg *crypto_mod_get(struct crypto_alg *alg); 69struct crypto_alg *crypto_mod_get(struct crypto_alg *alg);
70struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask);
71struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask); 70struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask);
72 71
73int crypto_init_cipher_ops(struct crypto_tfm *tfm); 72int crypto_init_cipher_ops(struct crypto_tfm *tfm);
diff --git a/crypto/lrw.c b/crypto/lrw.c
index cbbd7c50ad19..954a7064a179 100644
--- a/crypto/lrw.c
+++ b/crypto/lrw.c
@@ -28,13 +28,31 @@
28 28
29#include <crypto/b128ops.h> 29#include <crypto/b128ops.h>
30#include <crypto/gf128mul.h> 30#include <crypto/gf128mul.h>
31#include <crypto/lrw.h>
32 31
33#define LRW_BUFFER_SIZE 128u 32#define LRW_BUFFER_SIZE 128u
34 33
34#define LRW_BLOCK_SIZE 16
35
35struct priv { 36struct priv {
36 struct crypto_skcipher *child; 37 struct crypto_skcipher *child;
37 struct lrw_table_ctx table; 38
39 /*
40 * optimizes multiplying a random (non incrementing, as at the
41 * start of a new sector) value with key2, we could also have
42 * used 4k optimization tables or no optimization at all. In the
43 * latter case we would have to store key2 here
44 */
45 struct gf128mul_64k *table;
46
47 /*
48 * stores:
49 * key2*{ 0,0,...0,0,0,0,1 }, key2*{ 0,0,...0,0,0,1,1 },
50 * key2*{ 0,0,...0,0,1,1,1 }, key2*{ 0,0,...0,1,1,1,1 }
51 * key2*{ 0,0,...1,1,1,1,1 }, etc
52 * needed for optimized multiplication of incrementing values
53 * with key2
54 */
55 be128 mulinc[128];
38}; 56};
39 57
40struct rctx { 58struct rctx {
@@ -65,11 +83,25 @@ static inline void setbit128_bbe(void *b, int bit)
65 ), b); 83 ), b);
66} 84}
67 85
68int lrw_init_table(struct lrw_table_ctx *ctx, const u8 *tweak) 86static int setkey(struct crypto_skcipher *parent, const u8 *key,
87 unsigned int keylen)
69{ 88{
89 struct priv *ctx = crypto_skcipher_ctx(parent);
90 struct crypto_skcipher *child = ctx->child;
91 int err, bsize = LRW_BLOCK_SIZE;
92 const u8 *tweak = key + keylen - bsize;
70 be128 tmp = { 0 }; 93 be128 tmp = { 0 };
71 int i; 94 int i;
72 95
96 crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
97 crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
98 CRYPTO_TFM_REQ_MASK);
99 err = crypto_skcipher_setkey(child, key, keylen - bsize);
100 crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
101 CRYPTO_TFM_RES_MASK);
102 if (err)
103 return err;
104
73 if (ctx->table) 105 if (ctx->table)
74 gf128mul_free_64k(ctx->table); 106 gf128mul_free_64k(ctx->table);
75 107
@@ -87,34 +119,6 @@ int lrw_init_table(struct lrw_table_ctx *ctx, const u8 *tweak)
87 119
88 return 0; 120 return 0;
89} 121}
90EXPORT_SYMBOL_GPL(lrw_init_table);
91
92void lrw_free_table(struct lrw_table_ctx *ctx)
93{
94 if (ctx->table)
95 gf128mul_free_64k(ctx->table);
96}
97EXPORT_SYMBOL_GPL(lrw_free_table);
98
99static int setkey(struct crypto_skcipher *parent, const u8 *key,
100 unsigned int keylen)
101{
102 struct priv *ctx = crypto_skcipher_ctx(parent);
103 struct crypto_skcipher *child = ctx->child;
104 int err, bsize = LRW_BLOCK_SIZE;
105 const u8 *tweak = key + keylen - bsize;
106
107 crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
108 crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
109 CRYPTO_TFM_REQ_MASK);
110 err = crypto_skcipher_setkey(child, key, keylen - bsize);
111 crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
112 CRYPTO_TFM_RES_MASK);
113 if (err)
114 return err;
115
116 return lrw_init_table(&ctx->table, tweak);
117}
118 122
119static inline void inc(be128 *iv) 123static inline void inc(be128 *iv)
120{ 124{
@@ -238,7 +242,7 @@ static int pre_crypt(struct skcipher_request *req)
238 /* T <- I*Key2, using the optimization 242 /* T <- I*Key2, using the optimization
239 * discussed in the specification */ 243 * discussed in the specification */
240 be128_xor(&rctx->t, &rctx->t, 244 be128_xor(&rctx->t, &rctx->t,
241 &ctx->table.mulinc[get_index128(iv)]); 245 &ctx->mulinc[get_index128(iv)]);
242 inc(iv); 246 inc(iv);
243 } while ((avail -= bs) >= bs); 247 } while ((avail -= bs) >= bs);
244 248
@@ -301,7 +305,7 @@ static int init_crypt(struct skcipher_request *req, crypto_completion_t done)
301 memcpy(&rctx->t, req->iv, sizeof(rctx->t)); 305 memcpy(&rctx->t, req->iv, sizeof(rctx->t));
302 306
303 /* T <- I*Key2 */ 307 /* T <- I*Key2 */
304 gf128mul_64k_bbe(&rctx->t, ctx->table.table); 308 gf128mul_64k_bbe(&rctx->t, ctx->table);
305 309
306 return 0; 310 return 0;
307} 311}
@@ -313,7 +317,7 @@ static void exit_crypt(struct skcipher_request *req)
313 rctx->left = 0; 317 rctx->left = 0;
314 318
315 if (rctx->ext) 319 if (rctx->ext)
316 kfree(rctx->ext); 320 kzfree(rctx->ext);
317} 321}
318 322
319static int do_encrypt(struct skcipher_request *req, int err) 323static int do_encrypt(struct skcipher_request *req, int err)
@@ -416,85 +420,6 @@ static int decrypt(struct skcipher_request *req)
416 return do_decrypt(req, init_crypt(req, decrypt_done)); 420 return do_decrypt(req, init_crypt(req, decrypt_done));
417} 421}
418 422
419int lrw_crypt(struct blkcipher_desc *desc, struct scatterlist *sdst,
420 struct scatterlist *ssrc, unsigned int nbytes,
421 struct lrw_crypt_req *req)
422{
423 const unsigned int bsize = LRW_BLOCK_SIZE;
424 const unsigned int max_blks = req->tbuflen / bsize;
425 struct lrw_table_ctx *ctx = req->table_ctx;
426 struct blkcipher_walk walk;
427 unsigned int nblocks;
428 be128 *iv, *src, *dst, *t;
429 be128 *t_buf = req->tbuf;
430 int err, i;
431
432 BUG_ON(max_blks < 1);
433
434 blkcipher_walk_init(&walk, sdst, ssrc, nbytes);
435
436 err = blkcipher_walk_virt(desc, &walk);
437 nbytes = walk.nbytes;
438 if (!nbytes)
439 return err;
440
441 nblocks = min(walk.nbytes / bsize, max_blks);
442 src = (be128 *)walk.src.virt.addr;
443 dst = (be128 *)walk.dst.virt.addr;
444
445 /* calculate first value of T */
446 iv = (be128 *)walk.iv;
447 t_buf[0] = *iv;
448
449 /* T <- I*Key2 */
450 gf128mul_64k_bbe(&t_buf[0], ctx->table);
451
452 i = 0;
453 goto first;
454
455 for (;;) {
456 do {
457 for (i = 0; i < nblocks; i++) {
458 /* T <- I*Key2, using the optimization
459 * discussed in the specification */
460 be128_xor(&t_buf[i], t,
461 &ctx->mulinc[get_index128(iv)]);
462 inc(iv);
463first:
464 t = &t_buf[i];
465
466 /* PP <- T xor P */
467 be128_xor(dst + i, t, src + i);
468 }
469
470 /* CC <- E(Key2,PP) */
471 req->crypt_fn(req->crypt_ctx, (u8 *)dst,
472 nblocks * bsize);
473
474 /* C <- T xor CC */
475 for (i = 0; i < nblocks; i++)
476 be128_xor(dst + i, dst + i, &t_buf[i]);
477
478 src += nblocks;
479 dst += nblocks;
480 nbytes -= nblocks * bsize;
481 nblocks = min(nbytes / bsize, max_blks);
482 } while (nblocks > 0);
483
484 err = blkcipher_walk_done(desc, &walk, nbytes);
485 nbytes = walk.nbytes;
486 if (!nbytes)
487 break;
488
489 nblocks = min(nbytes / bsize, max_blks);
490 src = (be128 *)walk.src.virt.addr;
491 dst = (be128 *)walk.dst.virt.addr;
492 }
493
494 return err;
495}
496EXPORT_SYMBOL_GPL(lrw_crypt);
497
498static int init_tfm(struct crypto_skcipher *tfm) 423static int init_tfm(struct crypto_skcipher *tfm)
499{ 424{
500 struct skcipher_instance *inst = skcipher_alg_instance(tfm); 425 struct skcipher_instance *inst = skcipher_alg_instance(tfm);
@@ -518,7 +443,8 @@ static void exit_tfm(struct crypto_skcipher *tfm)
518{ 443{
519 struct priv *ctx = crypto_skcipher_ctx(tfm); 444 struct priv *ctx = crypto_skcipher_ctx(tfm);
520 445
521 lrw_free_table(&ctx->table); 446 if (ctx->table)
447 gf128mul_free_64k(ctx->table);
522 crypto_free_skcipher(ctx->child); 448 crypto_free_skcipher(ctx->child);
523} 449}
524 450
diff --git a/crypto/mcryptd.c b/crypto/mcryptd.c
index fe5129d6ff4e..f14152147ce8 100644
--- a/crypto/mcryptd.c
+++ b/crypto/mcryptd.c
@@ -367,7 +367,7 @@ static void mcryptd_hash_update(struct crypto_async_request *req_async, int err)
367 goto out; 367 goto out;
368 368
369 rctx->out = req->result; 369 rctx->out = req->result;
370 err = ahash_mcryptd_update(&rctx->areq); 370 err = crypto_ahash_update(&rctx->areq);
371 if (err) { 371 if (err) {
372 req->base.complete = rctx->complete; 372 req->base.complete = rctx->complete;
373 goto out; 373 goto out;
@@ -394,7 +394,7 @@ static void mcryptd_hash_final(struct crypto_async_request *req_async, int err)
394 goto out; 394 goto out;
395 395
396 rctx->out = req->result; 396 rctx->out = req->result;
397 err = ahash_mcryptd_final(&rctx->areq); 397 err = crypto_ahash_final(&rctx->areq);
398 if (err) { 398 if (err) {
399 req->base.complete = rctx->complete; 399 req->base.complete = rctx->complete;
400 goto out; 400 goto out;
@@ -420,7 +420,7 @@ static void mcryptd_hash_finup(struct crypto_async_request *req_async, int err)
420 if (unlikely(err == -EINPROGRESS)) 420 if (unlikely(err == -EINPROGRESS))
421 goto out; 421 goto out;
422 rctx->out = req->result; 422 rctx->out = req->result;
423 err = ahash_mcryptd_finup(&rctx->areq); 423 err = crypto_ahash_finup(&rctx->areq);
424 424
425 if (err) { 425 if (err) {
426 req->base.complete = rctx->complete; 426 req->base.complete = rctx->complete;
@@ -455,7 +455,7 @@ static void mcryptd_hash_digest(struct crypto_async_request *req_async, int err)
455 rctx->complete, req_async); 455 rctx->complete, req_async);
456 456
457 rctx->out = req->result; 457 rctx->out = req->result;
458 err = ahash_mcryptd_digest(desc); 458 err = crypto_ahash_init(desc) ?: crypto_ahash_finup(desc);
459 459
460out: 460out:
461 local_bh_disable(); 461 local_bh_disable();
@@ -612,32 +612,6 @@ struct mcryptd_ahash *mcryptd_alloc_ahash(const char *alg_name,
612} 612}
613EXPORT_SYMBOL_GPL(mcryptd_alloc_ahash); 613EXPORT_SYMBOL_GPL(mcryptd_alloc_ahash);
614 614
615int ahash_mcryptd_digest(struct ahash_request *desc)
616{
617 return crypto_ahash_init(desc) ?: ahash_mcryptd_finup(desc);
618}
619
620int ahash_mcryptd_update(struct ahash_request *desc)
621{
622 /* alignment is to be done by multi-buffer crypto algorithm if needed */
623
624 return crypto_ahash_update(desc);
625}
626
627int ahash_mcryptd_finup(struct ahash_request *desc)
628{
629 /* alignment is to be done by multi-buffer crypto algorithm if needed */
630
631 return crypto_ahash_finup(desc);
632}
633
634int ahash_mcryptd_final(struct ahash_request *desc)
635{
636 /* alignment is to be done by multi-buffer crypto algorithm if needed */
637
638 return crypto_ahash_final(desc);
639}
640
641struct crypto_ahash *mcryptd_ahash_child(struct mcryptd_ahash *tfm) 615struct crypto_ahash *mcryptd_ahash_child(struct mcryptd_ahash *tfm)
642{ 616{
643 struct mcryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base); 617 struct mcryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
diff --git a/crypto/md4.c b/crypto/md4.c
index 3515af425cc9..810fefb0a007 100644
--- a/crypto/md4.c
+++ b/crypto/md4.c
@@ -64,23 +64,6 @@ static inline u32 H(u32 x, u32 y, u32 z)
64#define ROUND2(a,b,c,d,k,s) (a = lshift(a + G(b,c,d) + k + (u32)0x5A827999,s)) 64#define ROUND2(a,b,c,d,k,s) (a = lshift(a + G(b,c,d) + k + (u32)0x5A827999,s))
65#define ROUND3(a,b,c,d,k,s) (a = lshift(a + H(b,c,d) + k + (u32)0x6ED9EBA1,s)) 65#define ROUND3(a,b,c,d,k,s) (a = lshift(a + H(b,c,d) + k + (u32)0x6ED9EBA1,s))
66 66
67/* XXX: this stuff can be optimized */
68static inline void le32_to_cpu_array(u32 *buf, unsigned int words)
69{
70 while (words--) {
71 __le32_to_cpus(buf);
72 buf++;
73 }
74}
75
76static inline void cpu_to_le32_array(u32 *buf, unsigned int words)
77{
78 while (words--) {
79 __cpu_to_le32s(buf);
80 buf++;
81 }
82}
83
84static void md4_transform(u32 *hash, u32 const *in) 67static void md4_transform(u32 *hash, u32 const *in)
85{ 68{
86 u32 a, b, c, d; 69 u32 a, b, c, d;
diff --git a/crypto/md5.c b/crypto/md5.c
index f7ae1a48225b..f776ef43d621 100644
--- a/crypto/md5.c
+++ b/crypto/md5.c
@@ -32,23 +32,6 @@ const u8 md5_zero_message_hash[MD5_DIGEST_SIZE] = {
32}; 32};
33EXPORT_SYMBOL_GPL(md5_zero_message_hash); 33EXPORT_SYMBOL_GPL(md5_zero_message_hash);
34 34
35/* XXX: this stuff can be optimized */
36static inline void le32_to_cpu_array(u32 *buf, unsigned int words)
37{
38 while (words--) {
39 __le32_to_cpus(buf);
40 buf++;
41 }
42}
43
44static inline void cpu_to_le32_array(u32 *buf, unsigned int words)
45{
46 while (words--) {
47 __cpu_to_le32s(buf);
48 buf++;
49 }
50}
51
52#define F1(x, y, z) (z ^ (x & (y ^ z))) 35#define F1(x, y, z) (z ^ (x & (y ^ z)))
53#define F2(x, y, z) F1(z, x, y) 36#define F2(x, y, z) F1(z, x, y)
54#define F3(x, y, z) (x ^ y ^ z) 37#define F3(x, y, z) (x ^ y ^ z)
diff --git a/crypto/rsa-pkcs1pad.c b/crypto/rsa-pkcs1pad.c
index 2908f93c3e55..9893dbfc1af4 100644
--- a/crypto/rsa-pkcs1pad.c
+++ b/crypto/rsa-pkcs1pad.c
@@ -192,7 +192,7 @@ static int pkcs1pad_encrypt_sign_complete(struct akcipher_request *req, int err)
192 if (likely(!pad_len)) 192 if (likely(!pad_len))
193 goto out; 193 goto out;
194 194
195 out_buf = kzalloc(ctx->key_size, GFP_ATOMIC); 195 out_buf = kzalloc(ctx->key_size, GFP_KERNEL);
196 err = -ENOMEM; 196 err = -ENOMEM;
197 if (!out_buf) 197 if (!out_buf)
198 goto out; 198 goto out;
diff --git a/crypto/simd.c b/crypto/simd.c
index 208226d7f908..ea7240be3001 100644
--- a/crypto/simd.c
+++ b/crypto/simd.c
@@ -221,4 +221,54 @@ void simd_skcipher_free(struct simd_skcipher_alg *salg)
221} 221}
222EXPORT_SYMBOL_GPL(simd_skcipher_free); 222EXPORT_SYMBOL_GPL(simd_skcipher_free);
223 223
224int simd_register_skciphers_compat(struct skcipher_alg *algs, int count,
225 struct simd_skcipher_alg **simd_algs)
226{
227 int err;
228 int i;
229 const char *algname;
230 const char *drvname;
231 const char *basename;
232 struct simd_skcipher_alg *simd;
233
234 err = crypto_register_skciphers(algs, count);
235 if (err)
236 return err;
237
238 for (i = 0; i < count; i++) {
239 WARN_ON(strncmp(algs[i].base.cra_name, "__", 2));
240 WARN_ON(strncmp(algs[i].base.cra_driver_name, "__", 2));
241 algname = algs[i].base.cra_name + 2;
242 drvname = algs[i].base.cra_driver_name + 2;
243 basename = algs[i].base.cra_driver_name;
244 simd = simd_skcipher_create_compat(algname, drvname, basename);
245 err = PTR_ERR(simd);
246 if (IS_ERR(simd))
247 goto err_unregister;
248 simd_algs[i] = simd;
249 }
250 return 0;
251
252err_unregister:
253 simd_unregister_skciphers(algs, count, simd_algs);
254 return err;
255}
256EXPORT_SYMBOL_GPL(simd_register_skciphers_compat);
257
258void simd_unregister_skciphers(struct skcipher_alg *algs, int count,
259 struct simd_skcipher_alg **simd_algs)
260{
261 int i;
262
263 crypto_unregister_skciphers(algs, count);
264
265 for (i = 0; i < count; i++) {
266 if (simd_algs[i]) {
267 simd_skcipher_free(simd_algs[i]);
268 simd_algs[i] = NULL;
269 }
270 }
271}
272EXPORT_SYMBOL_GPL(simd_unregister_skciphers);
273
224MODULE_LICENSE("GPL"); 274MODULE_LICENSE("GPL");
diff --git a/crypto/sm4_generic.c b/crypto/sm4_generic.c
new file mode 100644
index 000000000000..f537a2766c55
--- /dev/null
+++ b/crypto/sm4_generic.c
@@ -0,0 +1,244 @@
1// SPDX-License-Identifier: GPL-2.0
2
3/*
4 * SM4 Cipher Algorithm.
5 *
6 * Copyright (C) 2018 ARM Limited or its affiliates.
7 * All rights reserved.
8 */
9
10#include <crypto/sm4.h>
11#include <linux/module.h>
12#include <linux/init.h>
13#include <linux/types.h>
14#include <linux/errno.h>
15#include <linux/crypto.h>
16#include <asm/byteorder.h>
17#include <asm/unaligned.h>
18
19static const u32 fk[4] = {
20 0xa3b1bac6, 0x56aa3350, 0x677d9197, 0xb27022dc
21};
22
23static const u8 sbox[256] = {
24 0xd6, 0x90, 0xe9, 0xfe, 0xcc, 0xe1, 0x3d, 0xb7,
25 0x16, 0xb6, 0x14, 0xc2, 0x28, 0xfb, 0x2c, 0x05,
26 0x2b, 0x67, 0x9a, 0x76, 0x2a, 0xbe, 0x04, 0xc3,
27 0xaa, 0x44, 0x13, 0x26, 0x49, 0x86, 0x06, 0x99,
28 0x9c, 0x42, 0x50, 0xf4, 0x91, 0xef, 0x98, 0x7a,
29 0x33, 0x54, 0x0b, 0x43, 0xed, 0xcf, 0xac, 0x62,
30 0xe4, 0xb3, 0x1c, 0xa9, 0xc9, 0x08, 0xe8, 0x95,
31 0x80, 0xdf, 0x94, 0xfa, 0x75, 0x8f, 0x3f, 0xa6,
32 0x47, 0x07, 0xa7, 0xfc, 0xf3, 0x73, 0x17, 0xba,
33 0x83, 0x59, 0x3c, 0x19, 0xe6, 0x85, 0x4f, 0xa8,
34 0x68, 0x6b, 0x81, 0xb2, 0x71, 0x64, 0xda, 0x8b,
35 0xf8, 0xeb, 0x0f, 0x4b, 0x70, 0x56, 0x9d, 0x35,
36 0x1e, 0x24, 0x0e, 0x5e, 0x63, 0x58, 0xd1, 0xa2,
37 0x25, 0x22, 0x7c, 0x3b, 0x01, 0x21, 0x78, 0x87,
38 0xd4, 0x00, 0x46, 0x57, 0x9f, 0xd3, 0x27, 0x52,
39 0x4c, 0x36, 0x02, 0xe7, 0xa0, 0xc4, 0xc8, 0x9e,
40 0xea, 0xbf, 0x8a, 0xd2, 0x40, 0xc7, 0x38, 0xb5,
41 0xa3, 0xf7, 0xf2, 0xce, 0xf9, 0x61, 0x15, 0xa1,
42 0xe0, 0xae, 0x5d, 0xa4, 0x9b, 0x34, 0x1a, 0x55,
43 0xad, 0x93, 0x32, 0x30, 0xf5, 0x8c, 0xb1, 0xe3,
44 0x1d, 0xf6, 0xe2, 0x2e, 0x82, 0x66, 0xca, 0x60,
45 0xc0, 0x29, 0x23, 0xab, 0x0d, 0x53, 0x4e, 0x6f,
46 0xd5, 0xdb, 0x37, 0x45, 0xde, 0xfd, 0x8e, 0x2f,
47 0x03, 0xff, 0x6a, 0x72, 0x6d, 0x6c, 0x5b, 0x51,
48 0x8d, 0x1b, 0xaf, 0x92, 0xbb, 0xdd, 0xbc, 0x7f,
49 0x11, 0xd9, 0x5c, 0x41, 0x1f, 0x10, 0x5a, 0xd8,
50 0x0a, 0xc1, 0x31, 0x88, 0xa5, 0xcd, 0x7b, 0xbd,
51 0x2d, 0x74, 0xd0, 0x12, 0xb8, 0xe5, 0xb4, 0xb0,
52 0x89, 0x69, 0x97, 0x4a, 0x0c, 0x96, 0x77, 0x7e,
53 0x65, 0xb9, 0xf1, 0x09, 0xc5, 0x6e, 0xc6, 0x84,
54 0x18, 0xf0, 0x7d, 0xec, 0x3a, 0xdc, 0x4d, 0x20,
55 0x79, 0xee, 0x5f, 0x3e, 0xd7, 0xcb, 0x39, 0x48
56};
57
58static const u32 ck[] = {
59 0x00070e15, 0x1c232a31, 0x383f464d, 0x545b6269,
60 0x70777e85, 0x8c939aa1, 0xa8afb6bd, 0xc4cbd2d9,
61 0xe0e7eef5, 0xfc030a11, 0x181f262d, 0x343b4249,
62 0x50575e65, 0x6c737a81, 0x888f969d, 0xa4abb2b9,
63 0xc0c7ced5, 0xdce3eaf1, 0xf8ff060d, 0x141b2229,
64 0x30373e45, 0x4c535a61, 0x686f767d, 0x848b9299,
65 0xa0a7aeb5, 0xbcc3cad1, 0xd8dfe6ed, 0xf4fb0209,
66 0x10171e25, 0x2c333a41, 0x484f565d, 0x646b7279
67};
68
69static u32 sm4_t_non_lin_sub(u32 x)
70{
71 int i;
72 u8 *b = (u8 *)&x;
73
74 for (i = 0; i < 4; ++i)
75 b[i] = sbox[b[i]];
76
77 return x;
78}
79
80static u32 sm4_key_lin_sub(u32 x)
81{
82 return x ^ rol32(x, 13) ^ rol32(x, 23);
83
84}
85
86static u32 sm4_enc_lin_sub(u32 x)
87{
88 return x ^ rol32(x, 2) ^ rol32(x, 10) ^ rol32(x, 18) ^ rol32(x, 24);
89}
90
91static u32 sm4_key_sub(u32 x)
92{
93 return sm4_key_lin_sub(sm4_t_non_lin_sub(x));
94}
95
96static u32 sm4_enc_sub(u32 x)
97{
98 return sm4_enc_lin_sub(sm4_t_non_lin_sub(x));
99}
100
101static u32 sm4_round(const u32 *x, const u32 rk)
102{
103 return x[0] ^ sm4_enc_sub(x[1] ^ x[2] ^ x[3] ^ rk);
104}
105
106
107/**
108 * crypto_sm4_expand_key - Expands the SM4 key as described in GB/T 32907-2016
109 * @ctx: The location where the computed key will be stored.
110 * @in_key: The supplied key.
111 * @key_len: The length of the supplied key.
112 *
113 * Returns 0 on success. The function fails only if an invalid key size (or
114 * pointer) is supplied.
115 */
116int crypto_sm4_expand_key(struct crypto_sm4_ctx *ctx, const u8 *in_key,
117 unsigned int key_len)
118{
119 u32 rk[4], t;
120 const u32 *key = (u32 *)in_key;
121 int i;
122
123 if (key_len != SM4_KEY_SIZE)
124 return -EINVAL;
125
126 for (i = 0; i < 4; ++i)
127 rk[i] = get_unaligned_be32(&key[i]) ^ fk[i];
128
129 for (i = 0; i < 32; ++i) {
130 t = rk[0] ^ sm4_key_sub(rk[1] ^ rk[2] ^ rk[3] ^ ck[i]);
131 ctx->rkey_enc[i] = t;
132 rk[0] = rk[1];
133 rk[1] = rk[2];
134 rk[2] = rk[3];
135 rk[3] = t;
136 }
137
138 for (i = 0; i < 32; ++i)
139 ctx->rkey_dec[i] = ctx->rkey_enc[31 - i];
140
141 return 0;
142}
143EXPORT_SYMBOL_GPL(crypto_sm4_expand_key);
144
145/**
146 * crypto_sm4_set_key - Set the AES key.
147 * @tfm: The %crypto_tfm that is used in the context.
148 * @in_key: The input key.
149 * @key_len: The size of the key.
150 *
151 * Returns 0 on success, on failure the %CRYPTO_TFM_RES_BAD_KEY_LEN flag in tfm
152 * is set. The function uses crypto_sm4_expand_key() to expand the key.
153 * &crypto_sm4_ctx _must_ be the private data embedded in @tfm which is
154 * retrieved with crypto_tfm_ctx().
155 */
156int crypto_sm4_set_key(struct crypto_tfm *tfm, const u8 *in_key,
157 unsigned int key_len)
158{
159 struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm);
160 u32 *flags = &tfm->crt_flags;
161 int ret;
162
163 ret = crypto_sm4_expand_key(ctx, in_key, key_len);
164 if (!ret)
165 return 0;
166
167 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
168 return -EINVAL;
169}
170EXPORT_SYMBOL_GPL(crypto_sm4_set_key);
171
172static void sm4_do_crypt(const u32 *rk, u32 *out, const u32 *in)
173{
174 u32 x[4], i, t;
175
176 for (i = 0; i < 4; ++i)
177 x[i] = get_unaligned_be32(&in[i]);
178
179 for (i = 0; i < 32; ++i) {
180 t = sm4_round(x, rk[i]);
181 x[0] = x[1];
182 x[1] = x[2];
183 x[2] = x[3];
184 x[3] = t;
185 }
186
187 for (i = 0; i < 4; ++i)
188 put_unaligned_be32(x[3 - i], &out[i]);
189}
190
191/* encrypt a block of text */
192
193static void sm4_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
194{
195 const struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm);
196
197 sm4_do_crypt(ctx->rkey_enc, (u32 *)out, (u32 *)in);
198}
199
200/* decrypt a block of text */
201
202static void sm4_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
203{
204 const struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm);
205
206 sm4_do_crypt(ctx->rkey_dec, (u32 *)out, (u32 *)in);
207}
208
209static struct crypto_alg sm4_alg = {
210 .cra_name = "sm4",
211 .cra_driver_name = "sm4-generic",
212 .cra_priority = 100,
213 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
214 .cra_blocksize = SM4_BLOCK_SIZE,
215 .cra_ctxsize = sizeof(struct crypto_sm4_ctx),
216 .cra_module = THIS_MODULE,
217 .cra_u = {
218 .cipher = {
219 .cia_min_keysize = SM4_KEY_SIZE,
220 .cia_max_keysize = SM4_KEY_SIZE,
221 .cia_setkey = crypto_sm4_set_key,
222 .cia_encrypt = sm4_encrypt,
223 .cia_decrypt = sm4_decrypt
224 }
225 }
226};
227
228static int __init sm4_init(void)
229{
230 return crypto_register_alg(&sm4_alg);
231}
232
233static void __exit sm4_fini(void)
234{
235 crypto_unregister_alg(&sm4_alg);
236}
237
238module_init(sm4_init);
239module_exit(sm4_fini);
240
241MODULE_DESCRIPTION("SM4 Cipher Algorithm");
242MODULE_LICENSE("GPL v2");
243MODULE_ALIAS_CRYPTO("sm4");
244MODULE_ALIAS_CRYPTO("sm4-generic");
diff --git a/crypto/speck.c b/crypto/speck.c
new file mode 100644
index 000000000000..58aa9f7f91f7
--- /dev/null
+++ b/crypto/speck.c
@@ -0,0 +1,307 @@
1// SPDX-License-Identifier: GPL-2.0
2/*
3 * Speck: a lightweight block cipher
4 *
5 * Copyright (c) 2018 Google, Inc
6 *
7 * Speck has 10 variants, including 5 block sizes. For now we only implement
8 * the variants Speck128/128, Speck128/192, Speck128/256, Speck64/96, and
9 * Speck64/128. Speck${B}/${K} denotes the variant with a block size of B bits
10 * and a key size of K bits. The Speck128 variants are believed to be the most
11 * secure variants, and they use the same block size and key sizes as AES. The
12 * Speck64 variants are less secure, but on 32-bit processors are usually
13 * faster. The remaining variants (Speck32, Speck48, and Speck96) are even less
14 * secure and/or not as well suited for implementation on either 32-bit or
15 * 64-bit processors, so are omitted.
16 *
17 * Reference: "The Simon and Speck Families of Lightweight Block Ciphers"
18 * https://eprint.iacr.org/2013/404.pdf
19 *
20 * In a correspondence, the Speck designers have also clarified that the words
21 * should be interpreted in little-endian format, and the words should be
22 * ordered such that the first word of each block is 'y' rather than 'x', and
23 * the first key word (rather than the last) becomes the first round key.
24 */
25
26#include <asm/unaligned.h>
27#include <crypto/speck.h>
28#include <linux/bitops.h>
29#include <linux/crypto.h>
30#include <linux/init.h>
31#include <linux/module.h>
32
33/* Speck128 */
34
35static __always_inline void speck128_round(u64 *x, u64 *y, u64 k)
36{
37 *x = ror64(*x, 8);
38 *x += *y;
39 *x ^= k;
40 *y = rol64(*y, 3);
41 *y ^= *x;
42}
43
44static __always_inline void speck128_unround(u64 *x, u64 *y, u64 k)
45{
46 *y ^= *x;
47 *y = ror64(*y, 3);
48 *x ^= k;
49 *x -= *y;
50 *x = rol64(*x, 8);
51}
52
53void crypto_speck128_encrypt(const struct speck128_tfm_ctx *ctx,
54 u8 *out, const u8 *in)
55{
56 u64 y = get_unaligned_le64(in);
57 u64 x = get_unaligned_le64(in + 8);
58 int i;
59
60 for (i = 0; i < ctx->nrounds; i++)
61 speck128_round(&x, &y, ctx->round_keys[i]);
62
63 put_unaligned_le64(y, out);
64 put_unaligned_le64(x, out + 8);
65}
66EXPORT_SYMBOL_GPL(crypto_speck128_encrypt);
67
68static void speck128_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
69{
70 crypto_speck128_encrypt(crypto_tfm_ctx(tfm), out, in);
71}
72
73void crypto_speck128_decrypt(const struct speck128_tfm_ctx *ctx,
74 u8 *out, const u8 *in)
75{
76 u64 y = get_unaligned_le64(in);
77 u64 x = get_unaligned_le64(in + 8);
78 int i;
79
80 for (i = ctx->nrounds - 1; i >= 0; i--)
81 speck128_unround(&x, &y, ctx->round_keys[i]);
82
83 put_unaligned_le64(y, out);
84 put_unaligned_le64(x, out + 8);
85}
86EXPORT_SYMBOL_GPL(crypto_speck128_decrypt);
87
88static void speck128_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
89{
90 crypto_speck128_decrypt(crypto_tfm_ctx(tfm), out, in);
91}
92
93int crypto_speck128_setkey(struct speck128_tfm_ctx *ctx, const u8 *key,
94 unsigned int keylen)
95{
96 u64 l[3];
97 u64 k;
98 int i;
99
100 switch (keylen) {
101 case SPECK128_128_KEY_SIZE:
102 k = get_unaligned_le64(key);
103 l[0] = get_unaligned_le64(key + 8);
104 ctx->nrounds = SPECK128_128_NROUNDS;
105 for (i = 0; i < ctx->nrounds; i++) {
106 ctx->round_keys[i] = k;
107 speck128_round(&l[0], &k, i);
108 }
109 break;
110 case SPECK128_192_KEY_SIZE:
111 k = get_unaligned_le64(key);
112 l[0] = get_unaligned_le64(key + 8);
113 l[1] = get_unaligned_le64(key + 16);
114 ctx->nrounds = SPECK128_192_NROUNDS;
115 for (i = 0; i < ctx->nrounds; i++) {
116 ctx->round_keys[i] = k;
117 speck128_round(&l[i % 2], &k, i);
118 }
119 break;
120 case SPECK128_256_KEY_SIZE:
121 k = get_unaligned_le64(key);
122 l[0] = get_unaligned_le64(key + 8);
123 l[1] = get_unaligned_le64(key + 16);
124 l[2] = get_unaligned_le64(key + 24);
125 ctx->nrounds = SPECK128_256_NROUNDS;
126 for (i = 0; i < ctx->nrounds; i++) {
127 ctx->round_keys[i] = k;
128 speck128_round(&l[i % 3], &k, i);
129 }
130 break;
131 default:
132 return -EINVAL;
133 }
134
135 return 0;
136}
137EXPORT_SYMBOL_GPL(crypto_speck128_setkey);
138
139static int speck128_setkey(struct crypto_tfm *tfm, const u8 *key,
140 unsigned int keylen)
141{
142 return crypto_speck128_setkey(crypto_tfm_ctx(tfm), key, keylen);
143}
144
145/* Speck64 */
146
147static __always_inline void speck64_round(u32 *x, u32 *y, u32 k)
148{
149 *x = ror32(*x, 8);
150 *x += *y;
151 *x ^= k;
152 *y = rol32(*y, 3);
153 *y ^= *x;
154}
155
156static __always_inline void speck64_unround(u32 *x, u32 *y, u32 k)
157{
158 *y ^= *x;
159 *y = ror32(*y, 3);
160 *x ^= k;
161 *x -= *y;
162 *x = rol32(*x, 8);
163}
164
165void crypto_speck64_encrypt(const struct speck64_tfm_ctx *ctx,
166 u8 *out, const u8 *in)
167{
168 u32 y = get_unaligned_le32(in);
169 u32 x = get_unaligned_le32(in + 4);
170 int i;
171
172 for (i = 0; i < ctx->nrounds; i++)
173 speck64_round(&x, &y, ctx->round_keys[i]);
174
175 put_unaligned_le32(y, out);
176 put_unaligned_le32(x, out + 4);
177}
178EXPORT_SYMBOL_GPL(crypto_speck64_encrypt);
179
180static void speck64_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
181{
182 crypto_speck64_encrypt(crypto_tfm_ctx(tfm), out, in);
183}
184
185void crypto_speck64_decrypt(const struct speck64_tfm_ctx *ctx,
186 u8 *out, const u8 *in)
187{
188 u32 y = get_unaligned_le32(in);
189 u32 x = get_unaligned_le32(in + 4);
190 int i;
191
192 for (i = ctx->nrounds - 1; i >= 0; i--)
193 speck64_unround(&x, &y, ctx->round_keys[i]);
194
195 put_unaligned_le32(y, out);
196 put_unaligned_le32(x, out + 4);
197}
198EXPORT_SYMBOL_GPL(crypto_speck64_decrypt);
199
200static void speck64_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
201{
202 crypto_speck64_decrypt(crypto_tfm_ctx(tfm), out, in);
203}
204
205int crypto_speck64_setkey(struct speck64_tfm_ctx *ctx, const u8 *key,
206 unsigned int keylen)
207{
208 u32 l[3];
209 u32 k;
210 int i;
211
212 switch (keylen) {
213 case SPECK64_96_KEY_SIZE:
214 k = get_unaligned_le32(key);
215 l[0] = get_unaligned_le32(key + 4);
216 l[1] = get_unaligned_le32(key + 8);
217 ctx->nrounds = SPECK64_96_NROUNDS;
218 for (i = 0; i < ctx->nrounds; i++) {
219 ctx->round_keys[i] = k;
220 speck64_round(&l[i % 2], &k, i);
221 }
222 break;
223 case SPECK64_128_KEY_SIZE:
224 k = get_unaligned_le32(key);
225 l[0] = get_unaligned_le32(key + 4);
226 l[1] = get_unaligned_le32(key + 8);
227 l[2] = get_unaligned_le32(key + 12);
228 ctx->nrounds = SPECK64_128_NROUNDS;
229 for (i = 0; i < ctx->nrounds; i++) {
230 ctx->round_keys[i] = k;
231 speck64_round(&l[i % 3], &k, i);
232 }
233 break;
234 default:
235 return -EINVAL;
236 }
237
238 return 0;
239}
240EXPORT_SYMBOL_GPL(crypto_speck64_setkey);
241
242static int speck64_setkey(struct crypto_tfm *tfm, const u8 *key,
243 unsigned int keylen)
244{
245 return crypto_speck64_setkey(crypto_tfm_ctx(tfm), key, keylen);
246}
247
248/* Algorithm definitions */
249
250static struct crypto_alg speck_algs[] = {
251 {
252 .cra_name = "speck128",
253 .cra_driver_name = "speck128-generic",
254 .cra_priority = 100,
255 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
256 .cra_blocksize = SPECK128_BLOCK_SIZE,
257 .cra_ctxsize = sizeof(struct speck128_tfm_ctx),
258 .cra_module = THIS_MODULE,
259 .cra_u = {
260 .cipher = {
261 .cia_min_keysize = SPECK128_128_KEY_SIZE,
262 .cia_max_keysize = SPECK128_256_KEY_SIZE,
263 .cia_setkey = speck128_setkey,
264 .cia_encrypt = speck128_encrypt,
265 .cia_decrypt = speck128_decrypt
266 }
267 }
268 }, {
269 .cra_name = "speck64",
270 .cra_driver_name = "speck64-generic",
271 .cra_priority = 100,
272 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
273 .cra_blocksize = SPECK64_BLOCK_SIZE,
274 .cra_ctxsize = sizeof(struct speck64_tfm_ctx),
275 .cra_module = THIS_MODULE,
276 .cra_u = {
277 .cipher = {
278 .cia_min_keysize = SPECK64_96_KEY_SIZE,
279 .cia_max_keysize = SPECK64_128_KEY_SIZE,
280 .cia_setkey = speck64_setkey,
281 .cia_encrypt = speck64_encrypt,
282 .cia_decrypt = speck64_decrypt
283 }
284 }
285 }
286};
287
288static int __init speck_module_init(void)
289{
290 return crypto_register_algs(speck_algs, ARRAY_SIZE(speck_algs));
291}
292
293static void __exit speck_module_exit(void)
294{
295 crypto_unregister_algs(speck_algs, ARRAY_SIZE(speck_algs));
296}
297
298module_init(speck_module_init);
299module_exit(speck_module_exit);
300
301MODULE_DESCRIPTION("Speck block cipher (generic)");
302MODULE_LICENSE("GPL");
303MODULE_AUTHOR("Eric Biggers <ebiggers@google.com>");
304MODULE_ALIAS_CRYPTO("speck128");
305MODULE_ALIAS_CRYPTO("speck128-generic");
306MODULE_ALIAS_CRYPTO("speck64");
307MODULE_ALIAS_CRYPTO("speck64-generic");
diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c
index 14213a096fd2..51fe7c8744ae 100644
--- a/crypto/tcrypt.c
+++ b/crypto/tcrypt.c
@@ -1983,6 +1983,9 @@ static int do_test(const char *alg, u32 type, u32 mask, int m)
1983 case 190: 1983 case 190:
1984 ret += tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))"); 1984 ret += tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))");
1985 break; 1985 break;
1986 case 191:
1987 ret += tcrypt_test("ecb(sm4)");
1988 break;
1986 case 200: 1989 case 200:
1987 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0, 1990 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
1988 speed_template_16_24_32); 1991 speed_template_16_24_32);
diff --git a/crypto/testmgr.c b/crypto/testmgr.c
index d5e23a142a04..af4a01c5037b 100644
--- a/crypto/testmgr.c
+++ b/crypto/testmgr.c
@@ -3001,6 +3001,33 @@ static const struct alg_test_desc alg_test_descs[] = {
3001 } 3001 }
3002 } 3002 }
3003 }, { 3003 }, {
3004 .alg = "ecb(sm4)",
3005 .test = alg_test_skcipher,
3006 .suite = {
3007 .cipher = {
3008 .enc = __VECS(sm4_enc_tv_template),
3009 .dec = __VECS(sm4_dec_tv_template)
3010 }
3011 }
3012 }, {
3013 .alg = "ecb(speck128)",
3014 .test = alg_test_skcipher,
3015 .suite = {
3016 .cipher = {
3017 .enc = __VECS(speck128_enc_tv_template),
3018 .dec = __VECS(speck128_dec_tv_template)
3019 }
3020 }
3021 }, {
3022 .alg = "ecb(speck64)",
3023 .test = alg_test_skcipher,
3024 .suite = {
3025 .cipher = {
3026 .enc = __VECS(speck64_enc_tv_template),
3027 .dec = __VECS(speck64_dec_tv_template)
3028 }
3029 }
3030 }, {
3004 .alg = "ecb(tea)", 3031 .alg = "ecb(tea)",
3005 .test = alg_test_skcipher, 3032 .test = alg_test_skcipher,
3006 .suite = { 3033 .suite = {
@@ -3558,6 +3585,24 @@ static const struct alg_test_desc alg_test_descs[] = {
3558 } 3585 }
3559 } 3586 }
3560 }, { 3587 }, {
3588 .alg = "xts(speck128)",
3589 .test = alg_test_skcipher,
3590 .suite = {
3591 .cipher = {
3592 .enc = __VECS(speck128_xts_enc_tv_template),
3593 .dec = __VECS(speck128_xts_dec_tv_template)
3594 }
3595 }
3596 }, {
3597 .alg = "xts(speck64)",
3598 .test = alg_test_skcipher,
3599 .suite = {
3600 .cipher = {
3601 .enc = __VECS(speck64_xts_enc_tv_template),
3602 .dec = __VECS(speck64_xts_dec_tv_template)
3603 }
3604 }
3605 }, {
3561 .alg = "xts(twofish)", 3606 .alg = "xts(twofish)",
3562 .test = alg_test_skcipher, 3607 .test = alg_test_skcipher,
3563 .suite = { 3608 .suite = {
diff --git a/crypto/testmgr.h b/crypto/testmgr.h
index 6044f6906bd6..004c0a0f8004 100644
--- a/crypto/testmgr.h
+++ b/crypto/testmgr.h
@@ -548,7 +548,7 @@ static const struct akcipher_testvec rsa_tv_template[] = {
548static const struct akcipher_testvec pkcs1pad_rsa_tv_template[] = { 548static const struct akcipher_testvec pkcs1pad_rsa_tv_template[] = {
549 { 549 {
550 .key = 550 .key =
551 "\x30\x82\x03\x1f\x02\x01\x10\x02\x82\x01\x01\x00\xd7\x1e\x77\x82" 551 "\x30\x82\x03\x1f\x02\x01\x00\x02\x82\x01\x01\x00\xd7\x1e\x77\x82"
552 "\x8c\x92\x31\xe7\x69\x02\xa2\xd5\x5c\x78\xde\xa2\x0c\x8f\xfe\x28" 552 "\x8c\x92\x31\xe7\x69\x02\xa2\xd5\x5c\x78\xde\xa2\x0c\x8f\xfe\x28"
553 "\x59\x31\xdf\x40\x9c\x60\x61\x06\xb9\x2f\x62\x40\x80\x76\xcb\x67" 553 "\x59\x31\xdf\x40\x9c\x60\x61\x06\xb9\x2f\x62\x40\x80\x76\xcb\x67"
554 "\x4a\xb5\x59\x56\x69\x17\x07\xfa\xf9\x4c\xbd\x6c\x37\x7a\x46\x7d" 554 "\x4a\xb5\x59\x56\x69\x17\x07\xfa\xf9\x4c\xbd\x6c\x37\x7a\x46\x7d"
@@ -597,8 +597,8 @@ static const struct akcipher_testvec pkcs1pad_rsa_tv_template[] = {
597 "\xfe\xf8\x27\x1b\xd6\x55\x60\x5e\x48\xb7\x6d\x9a\xa8\x37\xf9\x7a" 597 "\xfe\xf8\x27\x1b\xd6\x55\x60\x5e\x48\xb7\x6d\x9a\xa8\x37\xf9\x7a"
598 "\xde\x1b\xcd\x5d\x1a\x30\xd4\xe9\x9e\x5b\x3c\x15\xf8\x9c\x1f\xda" 598 "\xde\x1b\xcd\x5d\x1a\x30\xd4\xe9\x9e\x5b\x3c\x15\xf8\x9c\x1f\xda"
599 "\xd1\x86\x48\x55\xce\x83\xee\x8e\x51\xc7\xde\x32\x12\x47\x7d\x46" 599 "\xd1\x86\x48\x55\xce\x83\xee\x8e\x51\xc7\xde\x32\x12\x47\x7d\x46"
600 "\xb8\x35\xdf\x41\x02\x01\x30\x02\x01\x30\x02\x01\x30\x02\x01\x30" 600 "\xb8\x35\xdf\x41\x02\x01\x00\x02\x01\x00\x02\x01\x00\x02\x01\x00"
601 "\x02\x01\x30", 601 "\x02\x01\x00",
602 .key_len = 804, 602 .key_len = 804,
603 /* 603 /*
604 * m is SHA256 hash of following message: 604 * m is SHA256 hash of following message:
@@ -2044,6 +2044,265 @@ static const struct hash_testvec crct10dif_tv_template[] = {
2044 .digest = (u8 *)(u16 []){ 0x44c6 }, 2044 .digest = (u8 *)(u16 []){ 0x44c6 },
2045 .np = 4, 2045 .np = 4,
2046 .tap = { 1, 255, 57, 6 }, 2046 .tap = { 1, 255, 57, 6 },
2047 }, {
2048 .plaintext = "\x6e\x05\x79\x10\xa7\x1b\xb2\x49"
2049 "\xe0\x54\xeb\x82\x19\x8d\x24\xbb"
2050 "\x2f\xc6\x5d\xf4\x68\xff\x96\x0a"
2051 "\xa1\x38\xcf\x43\xda\x71\x08\x7c"
2052 "\x13\xaa\x1e\xb5\x4c\xe3\x57\xee"
2053 "\x85\x1c\x90\x27\xbe\x32\xc9\x60"
2054 "\xf7\x6b\x02\x99\x0d\xa4\x3b\xd2"
2055 "\x46\xdd\x74\x0b\x7f\x16\xad\x21"
2056 "\xb8\x4f\xe6\x5a\xf1\x88\x1f\x93"
2057 "\x2a\xc1\x35\xcc\x63\xfa\x6e\x05"
2058 "\x9c\x10\xa7\x3e\xd5\x49\xe0\x77"
2059 "\x0e\x82\x19\xb0\x24\xbb\x52\xe9"
2060 "\x5d\xf4\x8b\x22\x96\x2d\xc4\x38"
2061 "\xcf\x66\xfd\x71\x08\x9f\x13\xaa"
2062 "\x41\xd8\x4c\xe3\x7a\x11\x85\x1c"
2063 "\xb3\x27\xbe\x55\xec\x60\xf7\x8e"
2064 "\x02\x99\x30\xc7\x3b\xd2\x69\x00"
2065 "\x74\x0b\xa2\x16\xad\x44\xdb\x4f"
2066 "\xe6\x7d\x14\x88\x1f\xb6\x2a\xc1"
2067 "\x58\xef\x63\xfa\x91\x05\x9c\x33"
2068 "\xca\x3e\xd5\x6c\x03\x77\x0e\xa5"
2069 "\x19\xb0\x47\xde\x52\xe9\x80\x17"
2070 "\x8b\x22\xb9\x2d\xc4\x5b\xf2\x66"
2071 "\xfd\x94\x08\x9f\x36\xcd\x41\xd8"
2072 "\x6f\x06\x7a\x11\xa8\x1c\xb3\x4a"
2073 "\xe1\x55\xec\x83\x1a\x8e\x25\xbc"
2074 "\x30\xc7\x5e\xf5\x69\x00\x97\x0b"
2075 "\xa2\x39\xd0\x44\xdb\x72\x09\x7d"
2076 "\x14\xab\x1f\xb6\x4d\xe4\x58\xef"
2077 "\x86\x1d\x91\x28\xbf\x33\xca\x61"
2078 "\xf8\x6c\x03\x9a\x0e\xa5\x3c\xd3"
2079 "\x47\xde\x75\x0c\x80\x17\xae\x22"
2080 "\xb9\x50\xe7\x5b\xf2\x89\x20\x94"
2081 "\x2b\xc2\x36\xcd\x64\xfb\x6f\x06"
2082 "\x9d\x11\xa8\x3f\xd6\x4a\xe1\x78"
2083 "\x0f\x83\x1a\xb1\x25\xbc\x53\xea"
2084 "\x5e\xf5\x8c\x00\x97\x2e\xc5\x39"
2085 "\xd0\x67\xfe\x72\x09\xa0\x14\xab"
2086 "\x42\xd9\x4d\xe4\x7b\x12\x86\x1d"
2087 "\xb4\x28\xbf\x56\xed\x61\xf8\x8f"
2088 "\x03\x9a\x31\xc8\x3c\xd3\x6a\x01"
2089 "\x75\x0c\xa3\x17\xae\x45\xdc\x50"
2090 "\xe7\x7e\x15\x89\x20\xb7\x2b\xc2"
2091 "\x59\xf0\x64\xfb\x92\x06\x9d\x34"
2092 "\xcb\x3f\xd6\x6d\x04\x78\x0f\xa6"
2093 "\x1a\xb1\x48\xdf\x53\xea\x81\x18"
2094 "\x8c\x23\xba\x2e\xc5\x5c\xf3\x67"
2095 "\xfe\x95\x09\xa0\x37\xce\x42\xd9"
2096 "\x70\x07\x7b\x12\xa9\x1d\xb4\x4b"
2097 "\xe2\x56\xed\x84\x1b\x8f\x26\xbd"
2098 "\x31\xc8\x5f\xf6\x6a\x01\x98\x0c"
2099 "\xa3\x3a\xd1\x45\xdc\x73\x0a\x7e"
2100 "\x15\xac\x20\xb7\x4e\xe5\x59\xf0"
2101 "\x87\x1e\x92\x29\xc0\x34\xcb\x62"
2102 "\xf9\x6d\x04\x9b\x0f\xa6\x3d\xd4"
2103 "\x48\xdf\x76\x0d\x81\x18\xaf\x23"
2104 "\xba\x51\xe8\x5c\xf3\x8a\x21\x95"
2105 "\x2c\xc3\x37\xce\x65\xfc\x70\x07"
2106 "\x9e\x12\xa9\x40\xd7\x4b\xe2\x79"
2107 "\x10\x84\x1b\xb2\x26\xbd\x54\xeb"
2108 "\x5f\xf6\x8d\x01\x98\x2f\xc6\x3a"
2109 "\xd1\x68\xff\x73\x0a\xa1\x15\xac"
2110 "\x43\xda\x4e\xe5\x7c\x13\x87\x1e"
2111 "\xb5\x29\xc0\x57\xee\x62\xf9\x90"
2112 "\x04\x9b\x32\xc9\x3d\xd4\x6b\x02"
2113 "\x76\x0d\xa4\x18\xaf\x46\xdd\x51"
2114 "\xe8\x7f\x16\x8a\x21\xb8\x2c\xc3"
2115 "\x5a\xf1\x65\xfc\x93\x07\x9e\x35"
2116 "\xcc\x40\xd7\x6e\x05\x79\x10\xa7"
2117 "\x1b\xb2\x49\xe0\x54\xeb\x82\x19"
2118 "\x8d\x24\xbb\x2f\xc6\x5d\xf4\x68"
2119 "\xff\x96\x0a\xa1\x38\xcf\x43\xda"
2120 "\x71\x08\x7c\x13\xaa\x1e\xb5\x4c"
2121 "\xe3\x57\xee\x85\x1c\x90\x27\xbe"
2122 "\x32\xc9\x60\xf7\x6b\x02\x99\x0d"
2123 "\xa4\x3b\xd2\x46\xdd\x74\x0b\x7f"
2124 "\x16\xad\x21\xb8\x4f\xe6\x5a\xf1"
2125 "\x88\x1f\x93\x2a\xc1\x35\xcc\x63"
2126 "\xfa\x6e\x05\x9c\x10\xa7\x3e\xd5"
2127 "\x49\xe0\x77\x0e\x82\x19\xb0\x24"
2128 "\xbb\x52\xe9\x5d\xf4\x8b\x22\x96"
2129 "\x2d\xc4\x38\xcf\x66\xfd\x71\x08"
2130 "\x9f\x13\xaa\x41\xd8\x4c\xe3\x7a"
2131 "\x11\x85\x1c\xb3\x27\xbe\x55\xec"
2132 "\x60\xf7\x8e\x02\x99\x30\xc7\x3b"
2133 "\xd2\x69\x00\x74\x0b\xa2\x16\xad"
2134 "\x44\xdb\x4f\xe6\x7d\x14\x88\x1f"
2135 "\xb6\x2a\xc1\x58\xef\x63\xfa\x91"
2136 "\x05\x9c\x33\xca\x3e\xd5\x6c\x03"
2137 "\x77\x0e\xa5\x19\xb0\x47\xde\x52"
2138 "\xe9\x80\x17\x8b\x22\xb9\x2d\xc4"
2139 "\x5b\xf2\x66\xfd\x94\x08\x9f\x36"
2140 "\xcd\x41\xd8\x6f\x06\x7a\x11\xa8"
2141 "\x1c\xb3\x4a\xe1\x55\xec\x83\x1a"
2142 "\x8e\x25\xbc\x30\xc7\x5e\xf5\x69"
2143 "\x00\x97\x0b\xa2\x39\xd0\x44\xdb"
2144 "\x72\x09\x7d\x14\xab\x1f\xb6\x4d"
2145 "\xe4\x58\xef\x86\x1d\x91\x28\xbf"
2146 "\x33\xca\x61\xf8\x6c\x03\x9a\x0e"
2147 "\xa5\x3c\xd3\x47\xde\x75\x0c\x80"
2148 "\x17\xae\x22\xb9\x50\xe7\x5b\xf2"
2149 "\x89\x20\x94\x2b\xc2\x36\xcd\x64"
2150 "\xfb\x6f\x06\x9d\x11\xa8\x3f\xd6"
2151 "\x4a\xe1\x78\x0f\x83\x1a\xb1\x25"
2152 "\xbc\x53\xea\x5e\xf5\x8c\x00\x97"
2153 "\x2e\xc5\x39\xd0\x67\xfe\x72\x09"
2154 "\xa0\x14\xab\x42\xd9\x4d\xe4\x7b"
2155 "\x12\x86\x1d\xb4\x28\xbf\x56\xed"
2156 "\x61\xf8\x8f\x03\x9a\x31\xc8\x3c"
2157 "\xd3\x6a\x01\x75\x0c\xa3\x17\xae"
2158 "\x45\xdc\x50\xe7\x7e\x15\x89\x20"
2159 "\xb7\x2b\xc2\x59\xf0\x64\xfb\x92"
2160 "\x06\x9d\x34\xcb\x3f\xd6\x6d\x04"
2161 "\x78\x0f\xa6\x1a\xb1\x48\xdf\x53"
2162 "\xea\x81\x18\x8c\x23\xba\x2e\xc5"
2163 "\x5c\xf3\x67\xfe\x95\x09\xa0\x37"
2164 "\xce\x42\xd9\x70\x07\x7b\x12\xa9"
2165 "\x1d\xb4\x4b\xe2\x56\xed\x84\x1b"
2166 "\x8f\x26\xbd\x31\xc8\x5f\xf6\x6a"
2167 "\x01\x98\x0c\xa3\x3a\xd1\x45\xdc"
2168 "\x73\x0a\x7e\x15\xac\x20\xb7\x4e"
2169 "\xe5\x59\xf0\x87\x1e\x92\x29\xc0"
2170 "\x34\xcb\x62\xf9\x6d\x04\x9b\x0f"
2171 "\xa6\x3d\xd4\x48\xdf\x76\x0d\x81"
2172 "\x18\xaf\x23\xba\x51\xe8\x5c\xf3"
2173 "\x8a\x21\x95\x2c\xc3\x37\xce\x65"
2174 "\xfc\x70\x07\x9e\x12\xa9\x40\xd7"
2175 "\x4b\xe2\x79\x10\x84\x1b\xb2\x26"
2176 "\xbd\x54\xeb\x5f\xf6\x8d\x01\x98"
2177 "\x2f\xc6\x3a\xd1\x68\xff\x73\x0a"
2178 "\xa1\x15\xac\x43\xda\x4e\xe5\x7c"
2179 "\x13\x87\x1e\xb5\x29\xc0\x57\xee"
2180 "\x62\xf9\x90\x04\x9b\x32\xc9\x3d"
2181 "\xd4\x6b\x02\x76\x0d\xa4\x18\xaf"
2182 "\x46\xdd\x51\xe8\x7f\x16\x8a\x21"
2183 "\xb8\x2c\xc3\x5a\xf1\x65\xfc\x93"
2184 "\x07\x9e\x35\xcc\x40\xd7\x6e\x05"
2185 "\x79\x10\xa7\x1b\xb2\x49\xe0\x54"
2186 "\xeb\x82\x19\x8d\x24\xbb\x2f\xc6"
2187 "\x5d\xf4\x68\xff\x96\x0a\xa1\x38"
2188 "\xcf\x43\xda\x71\x08\x7c\x13\xaa"
2189 "\x1e\xb5\x4c\xe3\x57\xee\x85\x1c"
2190 "\x90\x27\xbe\x32\xc9\x60\xf7\x6b"
2191 "\x02\x99\x0d\xa4\x3b\xd2\x46\xdd"
2192 "\x74\x0b\x7f\x16\xad\x21\xb8\x4f"
2193 "\xe6\x5a\xf1\x88\x1f\x93\x2a\xc1"
2194 "\x35\xcc\x63\xfa\x6e\x05\x9c\x10"
2195 "\xa7\x3e\xd5\x49\xe0\x77\x0e\x82"
2196 "\x19\xb0\x24\xbb\x52\xe9\x5d\xf4"
2197 "\x8b\x22\x96\x2d\xc4\x38\xcf\x66"
2198 "\xfd\x71\x08\x9f\x13\xaa\x41\xd8"
2199 "\x4c\xe3\x7a\x11\x85\x1c\xb3\x27"
2200 "\xbe\x55\xec\x60\xf7\x8e\x02\x99"
2201 "\x30\xc7\x3b\xd2\x69\x00\x74\x0b"
2202 "\xa2\x16\xad\x44\xdb\x4f\xe6\x7d"
2203 "\x14\x88\x1f\xb6\x2a\xc1\x58\xef"
2204 "\x63\xfa\x91\x05\x9c\x33\xca\x3e"
2205 "\xd5\x6c\x03\x77\x0e\xa5\x19\xb0"
2206 "\x47\xde\x52\xe9\x80\x17\x8b\x22"
2207 "\xb9\x2d\xc4\x5b\xf2\x66\xfd\x94"
2208 "\x08\x9f\x36\xcd\x41\xd8\x6f\x06"
2209 "\x7a\x11\xa8\x1c\xb3\x4a\xe1\x55"
2210 "\xec\x83\x1a\x8e\x25\xbc\x30\xc7"
2211 "\x5e\xf5\x69\x00\x97\x0b\xa2\x39"
2212 "\xd0\x44\xdb\x72\x09\x7d\x14\xab"
2213 "\x1f\xb6\x4d\xe4\x58\xef\x86\x1d"
2214 "\x91\x28\xbf\x33\xca\x61\xf8\x6c"
2215 "\x03\x9a\x0e\xa5\x3c\xd3\x47\xde"
2216 "\x75\x0c\x80\x17\xae\x22\xb9\x50"
2217 "\xe7\x5b\xf2\x89\x20\x94\x2b\xc2"
2218 "\x36\xcd\x64\xfb\x6f\x06\x9d\x11"
2219 "\xa8\x3f\xd6\x4a\xe1\x78\x0f\x83"
2220 "\x1a\xb1\x25\xbc\x53\xea\x5e\xf5"
2221 "\x8c\x00\x97\x2e\xc5\x39\xd0\x67"
2222 "\xfe\x72\x09\xa0\x14\xab\x42\xd9"
2223 "\x4d\xe4\x7b\x12\x86\x1d\xb4\x28"
2224 "\xbf\x56\xed\x61\xf8\x8f\x03\x9a"
2225 "\x31\xc8\x3c\xd3\x6a\x01\x75\x0c"
2226 "\xa3\x17\xae\x45\xdc\x50\xe7\x7e"
2227 "\x15\x89\x20\xb7\x2b\xc2\x59\xf0"
2228 "\x64\xfb\x92\x06\x9d\x34\xcb\x3f"
2229 "\xd6\x6d\x04\x78\x0f\xa6\x1a\xb1"
2230 "\x48\xdf\x53\xea\x81\x18\x8c\x23"
2231 "\xba\x2e\xc5\x5c\xf3\x67\xfe\x95"
2232 "\x09\xa0\x37\xce\x42\xd9\x70\x07"
2233 "\x7b\x12\xa9\x1d\xb4\x4b\xe2\x56"
2234 "\xed\x84\x1b\x8f\x26\xbd\x31\xc8"
2235 "\x5f\xf6\x6a\x01\x98\x0c\xa3\x3a"
2236 "\xd1\x45\xdc\x73\x0a\x7e\x15\xac"
2237 "\x20\xb7\x4e\xe5\x59\xf0\x87\x1e"
2238 "\x92\x29\xc0\x34\xcb\x62\xf9\x6d"
2239 "\x04\x9b\x0f\xa6\x3d\xd4\x48\xdf"
2240 "\x76\x0d\x81\x18\xaf\x23\xba\x51"
2241 "\xe8\x5c\xf3\x8a\x21\x95\x2c\xc3"
2242 "\x37\xce\x65\xfc\x70\x07\x9e\x12"
2243 "\xa9\x40\xd7\x4b\xe2\x79\x10\x84"
2244 "\x1b\xb2\x26\xbd\x54\xeb\x5f\xf6"
2245 "\x8d\x01\x98\x2f\xc6\x3a\xd1\x68"
2246 "\xff\x73\x0a\xa1\x15\xac\x43\xda"
2247 "\x4e\xe5\x7c\x13\x87\x1e\xb5\x29"
2248 "\xc0\x57\xee\x62\xf9\x90\x04\x9b"
2249 "\x32\xc9\x3d\xd4\x6b\x02\x76\x0d"
2250 "\xa4\x18\xaf\x46\xdd\x51\xe8\x7f"
2251 "\x16\x8a\x21\xb8\x2c\xc3\x5a\xf1"
2252 "\x65\xfc\x93\x07\x9e\x35\xcc\x40"
2253 "\xd7\x6e\x05\x79\x10\xa7\x1b\xb2"
2254 "\x49\xe0\x54\xeb\x82\x19\x8d\x24"
2255 "\xbb\x2f\xc6\x5d\xf4\x68\xff\x96"
2256 "\x0a\xa1\x38\xcf\x43\xda\x71\x08"
2257 "\x7c\x13\xaa\x1e\xb5\x4c\xe3\x57"
2258 "\xee\x85\x1c\x90\x27\xbe\x32\xc9"
2259 "\x60\xf7\x6b\x02\x99\x0d\xa4\x3b"
2260 "\xd2\x46\xdd\x74\x0b\x7f\x16\xad"
2261 "\x21\xb8\x4f\xe6\x5a\xf1\x88\x1f"
2262 "\x93\x2a\xc1\x35\xcc\x63\xfa\x6e"
2263 "\x05\x9c\x10\xa7\x3e\xd5\x49\xe0"
2264 "\x77\x0e\x82\x19\xb0\x24\xbb\x52"
2265 "\xe9\x5d\xf4\x8b\x22\x96\x2d\xc4"
2266 "\x38\xcf\x66\xfd\x71\x08\x9f\x13"
2267 "\xaa\x41\xd8\x4c\xe3\x7a\x11\x85"
2268 "\x1c\xb3\x27\xbe\x55\xec\x60\xf7"
2269 "\x8e\x02\x99\x30\xc7\x3b\xd2\x69"
2270 "\x00\x74\x0b\xa2\x16\xad\x44\xdb"
2271 "\x4f\xe6\x7d\x14\x88\x1f\xb6\x2a"
2272 "\xc1\x58\xef\x63\xfa\x91\x05\x9c"
2273 "\x33\xca\x3e\xd5\x6c\x03\x77\x0e"
2274 "\xa5\x19\xb0\x47\xde\x52\xe9\x80"
2275 "\x17\x8b\x22\xb9\x2d\xc4\x5b\xf2"
2276 "\x66\xfd\x94\x08\x9f\x36\xcd\x41"
2277 "\xd8\x6f\x06\x7a\x11\xa8\x1c\xb3"
2278 "\x4a\xe1\x55\xec\x83\x1a\x8e\x25"
2279 "\xbc\x30\xc7\x5e\xf5\x69\x00\x97"
2280 "\x0b\xa2\x39\xd0\x44\xdb\x72\x09"
2281 "\x7d\x14\xab\x1f\xb6\x4d\xe4\x58"
2282 "\xef\x86\x1d\x91\x28\xbf\x33\xca"
2283 "\x61\xf8\x6c\x03\x9a\x0e\xa5\x3c"
2284 "\xd3\x47\xde\x75\x0c\x80\x17\xae"
2285 "\x22\xb9\x50\xe7\x5b\xf2\x89\x20"
2286 "\x94\x2b\xc2\x36\xcd\x64\xfb\x6f"
2287 "\x06\x9d\x11\xa8\x3f\xd6\x4a\xe1"
2288 "\x78\x0f\x83\x1a\xb1\x25\xbc\x53"
2289 "\xea\x5e\xf5\x8c\x00\x97\x2e\xc5"
2290 "\x39\xd0\x67\xfe\x72\x09\xa0\x14"
2291 "\xab\x42\xd9\x4d\xe4\x7b\x12\x86"
2292 "\x1d\xb4\x28\xbf\x56\xed\x61\xf8"
2293 "\x8f\x03\x9a\x31\xc8\x3c\xd3\x6a"
2294 "\x01\x75\x0c\xa3\x17\xae\x45\xdc"
2295 "\x50\xe7\x7e\x15\x89\x20\xb7\x2b"
2296 "\xc2\x59\xf0\x64\xfb\x92\x06\x9d"
2297 "\x34\xcb\x3f\xd6\x6d\x04\x78\x0f"
2298 "\xa6\x1a\xb1\x48\xdf\x53\xea\x81"
2299 "\x18\x8c\x23\xba\x2e\xc5\x5c\xf3"
2300 "\x67\xfe\x95\x09\xa0\x37\xce\x42"
2301 "\xd9\x70\x07\x7b\x12\xa9\x1d\xb4"
2302 "\x4b\xe2\x56\xed\x84\x1b\x8f\x26"
2303 "\xbd\x31\xc8\x5f\xf6\x6a\x01\x98",
2304 .psize = 2048,
2305 .digest = (u8 *)(u16 []){ 0x23ca },
2047 } 2306 }
2048}; 2307};
2049 2308
@@ -14323,6 +14582,1623 @@ static const struct cipher_testvec serpent_xts_dec_tv_template[] = {
14323 }, 14582 },
14324}; 14583};
14325 14584
14585/*
14586 * SM4 test vector taken from the draft RFC
14587 * https://tools.ietf.org/html/draft-crypto-sm4-00#ref-GBT.32907-2016
14588 */
14589
14590static const struct cipher_testvec sm4_enc_tv_template[] = {
14591 { /* SM4 Appendix A: Example Calculations. Example 1. */
14592 .key = "\x01\x23\x45\x67\x89\xAB\xCD\xEF"
14593 "\xFE\xDC\xBA\x98\x76\x54\x32\x10",
14594 .klen = 16,
14595 .input = "\x01\x23\x45\x67\x89\xAB\xCD\xEF"
14596 "\xFE\xDC\xBA\x98\x76\x54\x32\x10",
14597 .ilen = 16,
14598 .result = "\x68\x1E\xDF\x34\xD2\x06\x96\x5E"
14599 "\x86\xB3\xE9\x4F\x53\x6E\x42\x46",
14600 .rlen = 16,
14601 }, { /*
14602 * SM4 Appendix A: Example Calculations.
14603 * Last 10 iterations of Example 2.
14604 */
14605 .key = "\x01\x23\x45\x67\x89\xAB\xCD\xEF"
14606 "\xFE\xDC\xBA\x98\x76\x54\x32\x10",
14607 .klen = 16,
14608 .input = "\x99\x4a\xc3\xe7\xc3\x57\x89\x6a"
14609 "\x81\xfc\xa8\xe\x38\x3e\xef\x80"
14610 "\xb1\x98\xf2\xde\x3f\x4b\xae\xd1"
14611 "\xf0\xf1\x30\x4c\x1\x27\x5a\x8f"
14612 "\x45\xe1\x39\xb7\xae\xff\x1f\x27"
14613 "\xad\x57\x15\xab\x31\x5d\xc\xef"
14614 "\x8c\xc8\x80\xbd\x11\x98\xf3\x7b"
14615 "\xa2\xdd\x14\x20\xf9\xe8\xbb\x82"
14616 "\xf7\x32\xca\x4b\xa8\xf7\xb3\x4d"
14617 "\x27\xd1\xcd\xe6\xb6\x65\x5a\x23"
14618 "\xc2\xf3\x54\x84\x53\xe3\xb9\x20"
14619 "\xa5\x37\x0\xbe\xe7\x7b\x48\xfb"
14620 "\x21\x3d\x9e\x48\x1d\x9e\xf5\xbf"
14621 "\x77\xd5\xb4\x4a\x53\x71\x94\x7a"
14622 "\x88\xa6\x6e\x6\x93\xca\x43\xa5"
14623 "\xc4\xf6\xcd\x53\x4b\x7b\x8e\xfe"
14624 "\xb4\x28\x7c\x42\x29\x32\x5d\x88"
14625 "\xed\xce\x0\x19\xe\x16\x2\x6e"
14626 "\x87\xff\x2c\xac\xe8\xe7\xe9\xbf"
14627 "\x31\x51\xec\x47\xc3\x51\x83\xc1",
14628 .ilen = 160,
14629 .result = "\xb1\x98\xf2\xde\x3f\x4b\xae\xd1"
14630 "\xf0\xf1\x30\x4c\x1\x27\x5a\x8f"
14631 "\x45\xe1\x39\xb7\xae\xff\x1f\x27"
14632 "\xad\x57\x15\xab\x31\x5d\xc\xef"
14633 "\x8c\xc8\x80\xbd\x11\x98\xf3\x7b"
14634 "\xa2\xdd\x14\x20\xf9\xe8\xbb\x82"
14635 "\xf7\x32\xca\x4b\xa8\xf7\xb3\x4d"
14636 "\x27\xd1\xcd\xe6\xb6\x65\x5a\x23"
14637 "\xc2\xf3\x54\x84\x53\xe3\xb9\x20"
14638 "\xa5\x37\x0\xbe\xe7\x7b\x48\xfb"
14639 "\x21\x3d\x9e\x48\x1d\x9e\xf5\xbf"
14640 "\x77\xd5\xb4\x4a\x53\x71\x94\x7a"
14641 "\x88\xa6\x6e\x6\x93\xca\x43\xa5"
14642 "\xc4\xf6\xcd\x53\x4b\x7b\x8e\xfe"
14643 "\xb4\x28\x7c\x42\x29\x32\x5d\x88"
14644 "\xed\xce\x0\x19\xe\x16\x2\x6e"
14645 "\x87\xff\x2c\xac\xe8\xe7\xe9\xbf"
14646 "\x31\x51\xec\x47\xc3\x51\x83\xc1"
14647 "\x59\x52\x98\xc7\xc6\xfd\x27\x1f"
14648 "\x4\x2\xf8\x4\xc3\x3d\x3f\x66",
14649 .rlen = 160
14650 }
14651};
14652
14653static const struct cipher_testvec sm4_dec_tv_template[] = {
14654 { /* SM4 Appendix A: Example Calculations. Example 1. */
14655 .key = "\x01\x23\x45\x67\x89\xAB\xCD\xEF"
14656 "\xFE\xDC\xBA\x98\x76\x54\x32\x10",
14657 .klen = 16,
14658 .input = "\x68\x1E\xDF\x34\xD2\x06\x96\x5E"
14659 "\x86\xB3\xE9\x4F\x53\x6E\x42\x46",
14660 .ilen = 16,
14661 .result = "\x01\x23\x45\x67\x89\xAB\xCD\xEF"
14662 "\xFE\xDC\xBA\x98\x76\x54\x32\x10",
14663 .rlen = 16,
14664 }, { /*
14665 * SM4 Appendix A: Example Calculations.
14666 * Last 10 iterations of Example 2.
14667 */
14668 .key = "\x01\x23\x45\x67\x89\xAB\xCD\xEF"
14669 "\xFE\xDC\xBA\x98\x76\x54\x32\x10",
14670 .klen = 16,
14671 .input = "\xb1\x98\xf2\xde\x3f\x4b\xae\xd1"
14672 "\xf0\xf1\x30\x4c\x1\x27\x5a\x8f"
14673 "\x45\xe1\x39\xb7\xae\xff\x1f\x27"
14674 "\xad\x57\x15\xab\x31\x5d\xc\xef"
14675 "\x8c\xc8\x80\xbd\x11\x98\xf3\x7b"
14676 "\xa2\xdd\x14\x20\xf9\xe8\xbb\x82"
14677 "\xf7\x32\xca\x4b\xa8\xf7\xb3\x4d"
14678 "\x27\xd1\xcd\xe6\xb6\x65\x5a\x23"
14679 "\xc2\xf3\x54\x84\x53\xe3\xb9\x20"
14680 "\xa5\x37\x0\xbe\xe7\x7b\x48\xfb"
14681 "\x21\x3d\x9e\x48\x1d\x9e\xf5\xbf"
14682 "\x77\xd5\xb4\x4a\x53\x71\x94\x7a"
14683 "\x88\xa6\x6e\x6\x93\xca\x43\xa5"
14684 "\xc4\xf6\xcd\x53\x4b\x7b\x8e\xfe"
14685 "\xb4\x28\x7c\x42\x29\x32\x5d\x88"
14686 "\xed\xce\x0\x19\xe\x16\x2\x6e"
14687 "\x87\xff\x2c\xac\xe8\xe7\xe9\xbf"
14688 "\x31\x51\xec\x47\xc3\x51\x83\xc1"
14689 "\x59\x52\x98\xc7\xc6\xfd\x27\x1f"
14690 "\x4\x2\xf8\x4\xc3\x3d\x3f\x66",
14691 .ilen = 160,
14692 .result = "\x99\x4a\xc3\xe7\xc3\x57\x89\x6a"
14693 "\x81\xfc\xa8\xe\x38\x3e\xef\x80"
14694 "\xb1\x98\xf2\xde\x3f\x4b\xae\xd1"
14695 "\xf0\xf1\x30\x4c\x1\x27\x5a\x8f"
14696 "\x45\xe1\x39\xb7\xae\xff\x1f\x27"
14697 "\xad\x57\x15\xab\x31\x5d\xc\xef"
14698 "\x8c\xc8\x80\xbd\x11\x98\xf3\x7b"
14699 "\xa2\xdd\x14\x20\xf9\xe8\xbb\x82"
14700 "\xf7\x32\xca\x4b\xa8\xf7\xb3\x4d"
14701 "\x27\xd1\xcd\xe6\xb6\x65\x5a\x23"
14702 "\xc2\xf3\x54\x84\x53\xe3\xb9\x20"
14703 "\xa5\x37\x0\xbe\xe7\x7b\x48\xfb"
14704 "\x21\x3d\x9e\x48\x1d\x9e\xf5\xbf"
14705 "\x77\xd5\xb4\x4a\x53\x71\x94\x7a"
14706 "\x88\xa6\x6e\x6\x93\xca\x43\xa5"
14707 "\xc4\xf6\xcd\x53\x4b\x7b\x8e\xfe"
14708 "\xb4\x28\x7c\x42\x29\x32\x5d\x88"
14709 "\xed\xce\x0\x19\xe\x16\x2\x6e"
14710 "\x87\xff\x2c\xac\xe8\xe7\xe9\xbf"
14711 "\x31\x51\xec\x47\xc3\x51\x83\xc1",
14712 .rlen = 160
14713 }
14714};
14715
14716/*
14717 * Speck test vectors taken from the original paper:
14718 * "The Simon and Speck Families of Lightweight Block Ciphers"
14719 * https://eprint.iacr.org/2013/404.pdf
14720 *
14721 * Note that the paper does not make byte and word order clear. But it was
14722 * confirmed with the authors that the intended orders are little endian byte
14723 * order and (y, x) word order. Equivalently, the printed test vectors, when
14724 * looking at only the bytes (ignoring the whitespace that divides them into
14725 * words), are backwards: the left-most byte is actually the one with the
14726 * highest memory address, while the right-most byte is actually the one with
14727 * the lowest memory address.
14728 */
14729
14730static const struct cipher_testvec speck128_enc_tv_template[] = {
14731 { /* Speck128/128 */
14732 .key = "\x00\x01\x02\x03\x04\x05\x06\x07"
14733 "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
14734 .klen = 16,
14735 .input = "\x20\x6d\x61\x64\x65\x20\x69\x74"
14736 "\x20\x65\x71\x75\x69\x76\x61\x6c",
14737 .ilen = 16,
14738 .result = "\x18\x0d\x57\x5c\xdf\xfe\x60\x78"
14739 "\x65\x32\x78\x79\x51\x98\x5d\xa6",
14740 .rlen = 16,
14741 }, { /* Speck128/192 */
14742 .key = "\x00\x01\x02\x03\x04\x05\x06\x07"
14743 "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
14744 "\x10\x11\x12\x13\x14\x15\x16\x17",
14745 .klen = 24,
14746 .input = "\x65\x6e\x74\x20\x74\x6f\x20\x43"
14747 "\x68\x69\x65\x66\x20\x48\x61\x72",
14748 .ilen = 16,
14749 .result = "\x86\x18\x3c\xe0\x5d\x18\xbc\xf9"
14750 "\x66\x55\x13\x13\x3a\xcf\xe4\x1b",
14751 .rlen = 16,
14752 }, { /* Speck128/256 */
14753 .key = "\x00\x01\x02\x03\x04\x05\x06\x07"
14754 "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
14755 "\x10\x11\x12\x13\x14\x15\x16\x17"
14756 "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f",
14757 .klen = 32,
14758 .input = "\x70\x6f\x6f\x6e\x65\x72\x2e\x20"
14759 "\x49\x6e\x20\x74\x68\x6f\x73\x65",
14760 .ilen = 16,
14761 .result = "\x43\x8f\x18\x9c\x8d\xb4\xee\x4e"
14762 "\x3e\xf5\xc0\x05\x04\x01\x09\x41",
14763 .rlen = 16,
14764 },
14765};
14766
14767static const struct cipher_testvec speck128_dec_tv_template[] = {
14768 { /* Speck128/128 */
14769 .key = "\x00\x01\x02\x03\x04\x05\x06\x07"
14770 "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f",
14771 .klen = 16,
14772 .input = "\x18\x0d\x57\x5c\xdf\xfe\x60\x78"
14773 "\x65\x32\x78\x79\x51\x98\x5d\xa6",
14774 .ilen = 16,
14775 .result = "\x20\x6d\x61\x64\x65\x20\x69\x74"
14776 "\x20\x65\x71\x75\x69\x76\x61\x6c",
14777 .rlen = 16,
14778 }, { /* Speck128/192 */
14779 .key = "\x00\x01\x02\x03\x04\x05\x06\x07"
14780 "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
14781 "\x10\x11\x12\x13\x14\x15\x16\x17",
14782 .klen = 24,
14783 .input = "\x86\x18\x3c\xe0\x5d\x18\xbc\xf9"
14784 "\x66\x55\x13\x13\x3a\xcf\xe4\x1b",
14785 .ilen = 16,
14786 .result = "\x65\x6e\x74\x20\x74\x6f\x20\x43"
14787 "\x68\x69\x65\x66\x20\x48\x61\x72",
14788 .rlen = 16,
14789 }, { /* Speck128/256 */
14790 .key = "\x00\x01\x02\x03\x04\x05\x06\x07"
14791 "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
14792 "\x10\x11\x12\x13\x14\x15\x16\x17"
14793 "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f",
14794 .klen = 32,
14795 .input = "\x43\x8f\x18\x9c\x8d\xb4\xee\x4e"
14796 "\x3e\xf5\xc0\x05\x04\x01\x09\x41",
14797 .ilen = 16,
14798 .result = "\x70\x6f\x6f\x6e\x65\x72\x2e\x20"
14799 "\x49\x6e\x20\x74\x68\x6f\x73\x65",
14800 .rlen = 16,
14801 },
14802};
14803
14804/*
14805 * Speck128-XTS test vectors, taken from the AES-XTS test vectors with the
14806 * result recomputed with Speck128 as the cipher
14807 */
14808
14809static const struct cipher_testvec speck128_xts_enc_tv_template[] = {
14810 {
14811 .key = "\x00\x00\x00\x00\x00\x00\x00\x00"
14812 "\x00\x00\x00\x00\x00\x00\x00\x00"
14813 "\x00\x00\x00\x00\x00\x00\x00\x00"
14814 "\x00\x00\x00\x00\x00\x00\x00\x00",
14815 .klen = 32,
14816 .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
14817 "\x00\x00\x00\x00\x00\x00\x00\x00",
14818 .input = "\x00\x00\x00\x00\x00\x00\x00\x00"
14819 "\x00\x00\x00\x00\x00\x00\x00\x00"
14820 "\x00\x00\x00\x00\x00\x00\x00\x00"
14821 "\x00\x00\x00\x00\x00\x00\x00\x00",
14822 .ilen = 32,
14823 .result = "\xbe\xa0\xe7\x03\xd7\xfe\xab\x62"
14824 "\x3b\x99\x4a\x64\x74\x77\xac\xed"
14825 "\xd8\xf4\xa6\xcf\xae\xb9\x07\x42"
14826 "\x51\xd9\xb6\x1d\xe0\x5e\xbc\x54",
14827 .rlen = 32,
14828 }, {
14829 .key = "\x11\x11\x11\x11\x11\x11\x11\x11"
14830 "\x11\x11\x11\x11\x11\x11\x11\x11"
14831 "\x22\x22\x22\x22\x22\x22\x22\x22"
14832 "\x22\x22\x22\x22\x22\x22\x22\x22",
14833 .klen = 32,
14834 .iv = "\x33\x33\x33\x33\x33\x00\x00\x00"
14835 "\x00\x00\x00\x00\x00\x00\x00\x00",
14836 .input = "\x44\x44\x44\x44\x44\x44\x44\x44"
14837 "\x44\x44\x44\x44\x44\x44\x44\x44"
14838 "\x44\x44\x44\x44\x44\x44\x44\x44"
14839 "\x44\x44\x44\x44\x44\x44\x44\x44",
14840 .ilen = 32,
14841 .result = "\xfb\x53\x81\x75\x6f\x9f\x34\xad"
14842 "\x7e\x01\xed\x7b\xcc\xda\x4e\x4a"
14843 "\xd4\x84\xa4\x53\xd5\x88\x73\x1b"
14844 "\xfd\xcb\xae\x0d\xf3\x04\xee\xe6",
14845 .rlen = 32,
14846 }, {
14847 .key = "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8"
14848 "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0"
14849 "\x22\x22\x22\x22\x22\x22\x22\x22"
14850 "\x22\x22\x22\x22\x22\x22\x22\x22",
14851 .klen = 32,
14852 .iv = "\x33\x33\x33\x33\x33\x00\x00\x00"
14853 "\x00\x00\x00\x00\x00\x00\x00\x00",
14854 .input = "\x44\x44\x44\x44\x44\x44\x44\x44"
14855 "\x44\x44\x44\x44\x44\x44\x44\x44"
14856 "\x44\x44\x44\x44\x44\x44\x44\x44"
14857 "\x44\x44\x44\x44\x44\x44\x44\x44",
14858 .ilen = 32,
14859 .result = "\x21\x52\x84\x15\xd1\xf7\x21\x55"
14860 "\xd9\x75\x4a\xd3\xc5\xdb\x9f\x7d"
14861 "\xda\x63\xb2\xf1\x82\xb0\x89\x59"
14862 "\x86\xd4\xaa\xaa\xdd\xff\x4f\x92",
14863 .rlen = 32,
14864 }, {
14865 .key = "\x27\x18\x28\x18\x28\x45\x90\x45"
14866 "\x23\x53\x60\x28\x74\x71\x35\x26"
14867 "\x31\x41\x59\x26\x53\x58\x97\x93"
14868 "\x23\x84\x62\x64\x33\x83\x27\x95",
14869 .klen = 32,
14870 .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
14871 "\x00\x00\x00\x00\x00\x00\x00\x00",
14872 .input = "\x00\x01\x02\x03\x04\x05\x06\x07"
14873 "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
14874 "\x10\x11\x12\x13\x14\x15\x16\x17"
14875 "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
14876 "\x20\x21\x22\x23\x24\x25\x26\x27"
14877 "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
14878 "\x30\x31\x32\x33\x34\x35\x36\x37"
14879 "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
14880 "\x40\x41\x42\x43\x44\x45\x46\x47"
14881 "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
14882 "\x50\x51\x52\x53\x54\x55\x56\x57"
14883 "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
14884 "\x60\x61\x62\x63\x64\x65\x66\x67"
14885 "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
14886 "\x70\x71\x72\x73\x74\x75\x76\x77"
14887 "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
14888 "\x80\x81\x82\x83\x84\x85\x86\x87"
14889 "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
14890 "\x90\x91\x92\x93\x94\x95\x96\x97"
14891 "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
14892 "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
14893 "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
14894 "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
14895 "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
14896 "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
14897 "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
14898 "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
14899 "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
14900 "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
14901 "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
14902 "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
14903 "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
14904 "\x00\x01\x02\x03\x04\x05\x06\x07"
14905 "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
14906 "\x10\x11\x12\x13\x14\x15\x16\x17"
14907 "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
14908 "\x20\x21\x22\x23\x24\x25\x26\x27"
14909 "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
14910 "\x30\x31\x32\x33\x34\x35\x36\x37"
14911 "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
14912 "\x40\x41\x42\x43\x44\x45\x46\x47"
14913 "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
14914 "\x50\x51\x52\x53\x54\x55\x56\x57"
14915 "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
14916 "\x60\x61\x62\x63\x64\x65\x66\x67"
14917 "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
14918 "\x70\x71\x72\x73\x74\x75\x76\x77"
14919 "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
14920 "\x80\x81\x82\x83\x84\x85\x86\x87"
14921 "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
14922 "\x90\x91\x92\x93\x94\x95\x96\x97"
14923 "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
14924 "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
14925 "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
14926 "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
14927 "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
14928 "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
14929 "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
14930 "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
14931 "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
14932 "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
14933 "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
14934 "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
14935 "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
14936 .ilen = 512,
14937 .result = "\x57\xb5\xf8\x71\x6e\x6d\xdd\x82"
14938 "\x53\xd0\xed\x2d\x30\xc1\x20\xef"
14939 "\x70\x67\x5e\xff\x09\x70\xbb\xc1"
14940 "\x3a\x7b\x48\x26\xd9\x0b\xf4\x48"
14941 "\xbe\xce\xb1\xc7\xb2\x67\xc4\xa7"
14942 "\x76\xf8\x36\x30\xb7\xb4\x9a\xd9"
14943 "\xf5\x9d\xd0\x7b\xc1\x06\x96\x44"
14944 "\x19\xc5\x58\x84\x63\xb9\x12\x68"
14945 "\x68\xc7\xaa\x18\x98\xf2\x1f\x5c"
14946 "\x39\xa6\xd8\x32\x2b\xc3\x51\xfd"
14947 "\x74\x79\x2e\xb4\x44\xd7\x69\xc4"
14948 "\xfc\x29\xe6\xed\x26\x1e\xa6\x9d"
14949 "\x1c\xbe\x00\x0e\x7f\x3a\xca\xfb"
14950 "\x6d\x13\x65\xa0\xf9\x31\x12\xe2"
14951 "\x26\xd1\xec\x2b\x0a\x8b\x59\x99"
14952 "\xa7\x49\xa0\x0e\x09\x33\x85\x50"
14953 "\xc3\x23\xca\x7a\xdd\x13\x45\x5f"
14954 "\xde\x4c\xa7\xcb\x00\x8a\x66\x6f"
14955 "\xa2\xb6\xb1\x2e\xe1\xa0\x18\xf6"
14956 "\xad\xf3\xbd\xeb\xc7\xef\x55\x4f"
14957 "\x79\x91\x8d\x36\x13\x7b\xd0\x4a"
14958 "\x6c\x39\xfb\x53\xb8\x6f\x02\x51"
14959 "\xa5\x20\xac\x24\x1c\x73\x59\x73"
14960 "\x58\x61\x3a\x87\x58\xb3\x20\x56"
14961 "\x39\x06\x2b\x4d\xd3\x20\x2b\x89"
14962 "\x3f\xa2\xf0\x96\xeb\x7f\xa4\xcd"
14963 "\x11\xae\xbd\xcb\x3a\xb4\xd9\x91"
14964 "\x09\x35\x71\x50\x65\xac\x92\xe3"
14965 "\x7b\x32\xc0\x7a\xdd\xd4\xc3\x92"
14966 "\x6f\xeb\x79\xde\x6f\xd3\x25\xc9"
14967 "\xcd\x63\xf5\x1e\x7a\x3b\x26\x9d"
14968 "\x77\x04\x80\xa9\xbf\x38\xb5\xbd"
14969 "\xb8\x05\x07\xbd\xfd\xab\x7b\xf8"
14970 "\x2a\x26\xcc\x49\x14\x6d\x55\x01"
14971 "\x06\x94\xd8\xb2\x2d\x53\x83\x1b"
14972 "\x8f\xd4\xdd\x57\x12\x7e\x18\xba"
14973 "\x8e\xe2\x4d\x80\xef\x7e\x6b\x9d"
14974 "\x24\xa9\x60\xa4\x97\x85\x86\x2a"
14975 "\x01\x00\x09\xf1\xcb\x4a\x24\x1c"
14976 "\xd8\xf6\xe6\x5b\xe7\x5d\xf2\xc4"
14977 "\x97\x1c\x10\xc6\x4d\x66\x4f\x98"
14978 "\x87\x30\xac\xd5\xea\x73\x49\x10"
14979 "\x80\xea\xe5\x5f\x4d\x5f\x03\x33"
14980 "\x66\x02\x35\x3d\x60\x06\x36\x4f"
14981 "\x14\x1c\xd8\x07\x1f\x78\xd0\xf8"
14982 "\x4f\x6c\x62\x7c\x15\xa5\x7c\x28"
14983 "\x7c\xcc\xeb\x1f\xd1\x07\x90\x93"
14984 "\x7e\xc2\xa8\x3a\x80\xc0\xf5\x30"
14985 "\xcc\x75\xcf\x16\x26\xa9\x26\x3b"
14986 "\xe7\x68\x2f\x15\x21\x5b\xe4\x00"
14987 "\xbd\x48\x50\xcd\x75\x70\xc4\x62"
14988 "\xbb\x41\xfb\x89\x4a\x88\x3b\x3b"
14989 "\x51\x66\x02\x69\x04\x97\x36\xd4"
14990 "\x75\xae\x0b\xa3\x42\xf8\xca\x79"
14991 "\x8f\x93\xe9\xcc\x38\xbd\xd6\xd2"
14992 "\xf9\x70\x4e\xc3\x6a\x8e\x25\xbd"
14993 "\xea\x15\x5a\xa0\x85\x7e\x81\x0d"
14994 "\x03\xe7\x05\x39\xf5\x05\x26\xee"
14995 "\xec\xaa\x1f\x3d\xc9\x98\x76\x01"
14996 "\x2c\xf4\xfc\xa3\x88\x77\x38\xc4"
14997 "\x50\x65\x50\x6d\x04\x1f\xdf\x5a"
14998 "\xaa\xf2\x01\xa9\xc1\x8d\xee\xca"
14999 "\x47\x26\xef\x39\xb8\xb4\xf2\xd1"
15000 "\xd6\xbb\x1b\x2a\xc1\x34\x14\xcf",
15001 .rlen = 512,
15002 }, {
15003 .key = "\x27\x18\x28\x18\x28\x45\x90\x45"
15004 "\x23\x53\x60\x28\x74\x71\x35\x26"
15005 "\x62\x49\x77\x57\x24\x70\x93\x69"
15006 "\x99\x59\x57\x49\x66\x96\x76\x27"
15007 "\x31\x41\x59\x26\x53\x58\x97\x93"
15008 "\x23\x84\x62\x64\x33\x83\x27\x95"
15009 "\x02\x88\x41\x97\x16\x93\x99\x37"
15010 "\x51\x05\x82\x09\x74\x94\x45\x92",
15011 .klen = 64,
15012 .iv = "\xff\x00\x00\x00\x00\x00\x00\x00"
15013 "\x00\x00\x00\x00\x00\x00\x00\x00",
15014 .input = "\x00\x01\x02\x03\x04\x05\x06\x07"
15015 "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
15016 "\x10\x11\x12\x13\x14\x15\x16\x17"
15017 "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
15018 "\x20\x21\x22\x23\x24\x25\x26\x27"
15019 "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
15020 "\x30\x31\x32\x33\x34\x35\x36\x37"
15021 "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
15022 "\x40\x41\x42\x43\x44\x45\x46\x47"
15023 "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
15024 "\x50\x51\x52\x53\x54\x55\x56\x57"
15025 "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
15026 "\x60\x61\x62\x63\x64\x65\x66\x67"
15027 "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
15028 "\x70\x71\x72\x73\x74\x75\x76\x77"
15029 "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
15030 "\x80\x81\x82\x83\x84\x85\x86\x87"
15031 "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
15032 "\x90\x91\x92\x93\x94\x95\x96\x97"
15033 "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
15034 "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
15035 "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
15036 "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
15037 "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
15038 "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
15039 "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
15040 "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
15041 "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
15042 "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
15043 "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
15044 "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
15045 "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
15046 "\x00\x01\x02\x03\x04\x05\x06\x07"
15047 "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
15048 "\x10\x11\x12\x13\x14\x15\x16\x17"
15049 "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
15050 "\x20\x21\x22\x23\x24\x25\x26\x27"
15051 "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
15052 "\x30\x31\x32\x33\x34\x35\x36\x37"
15053 "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
15054 "\x40\x41\x42\x43\x44\x45\x46\x47"
15055 "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
15056 "\x50\x51\x52\x53\x54\x55\x56\x57"
15057 "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
15058 "\x60\x61\x62\x63\x64\x65\x66\x67"
15059 "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
15060 "\x70\x71\x72\x73\x74\x75\x76\x77"
15061 "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
15062 "\x80\x81\x82\x83\x84\x85\x86\x87"
15063 "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
15064 "\x90\x91\x92\x93\x94\x95\x96\x97"
15065 "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
15066 "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
15067 "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
15068 "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
15069 "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
15070 "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
15071 "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
15072 "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
15073 "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
15074 "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
15075 "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
15076 "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
15077 "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
15078 .ilen = 512,
15079 .result = "\xc5\x85\x2a\x4b\x73\xe4\xf6\xf1"
15080 "\x7e\xf9\xf6\xe9\xa3\x73\x36\xcb"
15081 "\xaa\xb6\x22\xb0\x24\x6e\x3d\x73"
15082 "\x92\x99\xde\xd3\x76\xed\xcd\x63"
15083 "\x64\x3a\x22\x57\xc1\x43\x49\xd4"
15084 "\x79\x36\x31\x19\x62\xae\x10\x7e"
15085 "\x7d\xcf\x7a\xe2\x6b\xce\x27\xfa"
15086 "\xdc\x3d\xd9\x83\xd3\x42\x4c\xe0"
15087 "\x1b\xd6\x1d\x1a\x6f\xd2\x03\x00"
15088 "\xfc\x81\x99\x8a\x14\x62\xf5\x7e"
15089 "\x0d\xe7\x12\xe8\x17\x9d\x0b\xec"
15090 "\xe2\xf7\xc9\xa7\x63\xd1\x79\xb6"
15091 "\x62\x62\x37\xfe\x0a\x4c\x4a\x37"
15092 "\x70\xc7\x5e\x96\x5f\xbc\x8e\x9e"
15093 "\x85\x3c\x4f\x26\x64\x85\xbc\x68"
15094 "\xb0\xe0\x86\x5e\x26\x41\xce\x11"
15095 "\x50\xda\x97\x14\xe9\x9e\xc7\x6d"
15096 "\x3b\xdc\x43\xde\x2b\x27\x69\x7d"
15097 "\xfc\xb0\x28\xbd\x8f\xb1\xc6\x31"
15098 "\x14\x4d\xf0\x74\x37\xfd\x07\x25"
15099 "\x96\x55\xe5\xfc\x9e\x27\x2a\x74"
15100 "\x1b\x83\x4d\x15\x83\xac\x57\xa0"
15101 "\xac\xa5\xd0\x38\xef\x19\x56\x53"
15102 "\x25\x4b\xfc\xce\x04\x23\xe5\x6b"
15103 "\xf6\xc6\x6c\x32\x0b\xb3\x12\xc5"
15104 "\xed\x22\x34\x1c\x5d\xed\x17\x06"
15105 "\x36\xa3\xe6\x77\xb9\x97\x46\xb8"
15106 "\xe9\x3f\x7e\xc7\xbc\x13\x5c\xdc"
15107 "\x6e\x3f\x04\x5e\xd1\x59\xa5\x82"
15108 "\x35\x91\x3d\x1b\xe4\x97\x9f\x92"
15109 "\x1c\x5e\x5f\x6f\x41\xd4\x62\xa1"
15110 "\x8d\x39\xfc\x42\xfb\x38\x80\xb9"
15111 "\x0a\xe3\xcc\x6a\x93\xd9\x7a\xb1"
15112 "\xe9\x69\xaf\x0a\x6b\x75\x38\xa7"
15113 "\xa1\xbf\xf7\xda\x95\x93\x4b\x78"
15114 "\x19\xf5\x94\xf9\xd2\x00\x33\x37"
15115 "\xcf\xf5\x9e\x9c\xf3\xcc\xa6\xee"
15116 "\x42\xb2\x9e\x2c\x5f\x48\x23\x26"
15117 "\x15\x25\x17\x03\x3d\xfe\x2c\xfc"
15118 "\xeb\xba\xda\xe0\x00\x05\xb6\xa6"
15119 "\x07\xb3\xe8\x36\x5b\xec\x5b\xbf"
15120 "\xd6\x5b\x00\x74\xc6\x97\xf1\x6a"
15121 "\x49\xa1\xc3\xfa\x10\x52\xb9\x14"
15122 "\xad\xb7\x73\xf8\x78\x12\xc8\x59"
15123 "\x17\x80\x4c\x57\x39\xf1\x6d\x80"
15124 "\x25\x77\x0f\x5e\x7d\xf0\xaf\x21"
15125 "\xec\xce\xb7\xc8\x02\x8a\xed\x53"
15126 "\x2c\x25\x68\x2e\x1f\x85\x5e\x67"
15127 "\xd1\x07\x7a\x3a\x89\x08\xe0\x34"
15128 "\xdc\xdb\x26\xb4\x6b\x77\xfc\x40"
15129 "\x31\x15\x72\xa0\xf0\x73\xd9\x3b"
15130 "\xd5\xdb\xfe\xfc\x8f\xa9\x44\xa2"
15131 "\x09\x9f\xc6\x33\xe5\xe2\x88\xe8"
15132 "\xf3\xf0\x1a\xf4\xce\x12\x0f\xd6"
15133 "\xf7\x36\xe6\xa4\xf4\x7a\x10\x58"
15134 "\xcc\x1f\x48\x49\x65\x47\x75\xe9"
15135 "\x28\xe1\x65\x7b\xf2\xc4\xb5\x07"
15136 "\xf2\xec\x76\xd8\x8f\x09\xf3\x16"
15137 "\xa1\x51\x89\x3b\xeb\x96\x42\xac"
15138 "\x65\xe0\x67\x63\x29\xdc\xb4\x7d"
15139 "\xf2\x41\x51\x6a\xcb\xde\x3c\xfb"
15140 "\x66\x8d\x13\xca\xe0\x59\x2a\x00"
15141 "\xc9\x53\x4c\xe6\x9e\xe2\x73\xd5"
15142 "\x67\x19\xb2\xbd\x9a\x63\xd7\x5c",
15143 .rlen = 512,
15144 .also_non_np = 1,
15145 .np = 3,
15146 .tap = { 512 - 20, 4, 16 },
15147 }
15148};
15149
/*
 * Speck128-XTS decryption test vectors: the exact inverse of the
 * encryption vectors above — each .input here is the ciphertext and
 * .result the corresponding plaintext, with identical keys and IVs.
 * Do not modify the data bytes; any change breaks the self-test.
 */
15150 static const struct cipher_testvec speck128_xts_dec_tv_template[] = {
15151 	{
15152 		.key	= "\x00\x00\x00\x00\x00\x00\x00\x00"
15153 			  "\x00\x00\x00\x00\x00\x00\x00\x00"
15154 			  "\x00\x00\x00\x00\x00\x00\x00\x00"
15155 			  "\x00\x00\x00\x00\x00\x00\x00\x00",
15156 		.klen	= 32,
15157 		.iv	= "\x00\x00\x00\x00\x00\x00\x00\x00"
15158 			  "\x00\x00\x00\x00\x00\x00\x00\x00",
15159 		.input	= "\xbe\xa0\xe7\x03\xd7\xfe\xab\x62"
15160 			  "\x3b\x99\x4a\x64\x74\x77\xac\xed"
15161 			  "\xd8\xf4\xa6\xcf\xae\xb9\x07\x42"
15162 			  "\x51\xd9\xb6\x1d\xe0\x5e\xbc\x54",
15163 		.ilen	= 32,
15164 		.result	= "\x00\x00\x00\x00\x00\x00\x00\x00"
15165 			  "\x00\x00\x00\x00\x00\x00\x00\x00"
15166 			  "\x00\x00\x00\x00\x00\x00\x00\x00"
15167 			  "\x00\x00\x00\x00\x00\x00\x00\x00",
15168 		.rlen	= 32,
15169 	}, {
15170 		.key	= "\x11\x11\x11\x11\x11\x11\x11\x11"
15171 			  "\x11\x11\x11\x11\x11\x11\x11\x11"
15172 			  "\x22\x22\x22\x22\x22\x22\x22\x22"
15173 			  "\x22\x22\x22\x22\x22\x22\x22\x22",
15174 		.klen	= 32,
15175 		.iv	= "\x33\x33\x33\x33\x33\x00\x00\x00"
15176 			  "\x00\x00\x00\x00\x00\x00\x00\x00",
15177 		.input	= "\xfb\x53\x81\x75\x6f\x9f\x34\xad"
15178 			  "\x7e\x01\xed\x7b\xcc\xda\x4e\x4a"
15179 			  "\xd4\x84\xa4\x53\xd5\x88\x73\x1b"
15180 			  "\xfd\xcb\xae\x0d\xf3\x04\xee\xe6",
15181 		.ilen	= 32,
15182 		.result	= "\x44\x44\x44\x44\x44\x44\x44\x44"
15183 			  "\x44\x44\x44\x44\x44\x44\x44\x44"
15184 			  "\x44\x44\x44\x44\x44\x44\x44\x44"
15185 			  "\x44\x44\x44\x44\x44\x44\x44\x44",
15186 		.rlen	= 32,
15187 	}, {
15188 		.key	= "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8"
15189 			  "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0"
15190 			  "\x22\x22\x22\x22\x22\x22\x22\x22"
15191 			  "\x22\x22\x22\x22\x22\x22\x22\x22",
15192 		.klen	= 32,
15193 		.iv	= "\x33\x33\x33\x33\x33\x00\x00\x00"
15194 			  "\x00\x00\x00\x00\x00\x00\x00\x00",
15195 		.input	= "\x21\x52\x84\x15\xd1\xf7\x21\x55"
15196 			  "\xd9\x75\x4a\xd3\xc5\xdb\x9f\x7d"
15197 			  "\xda\x63\xb2\xf1\x82\xb0\x89\x59"
15198 			  "\x86\xd4\xaa\xaa\xdd\xff\x4f\x92",
15199 		.ilen	= 32,
15200 		.result	= "\x44\x44\x44\x44\x44\x44\x44\x44"
15201 			  "\x44\x44\x44\x44\x44\x44\x44\x44"
15202 			  "\x44\x44\x44\x44\x44\x44\x44\x44"
15203 			  "\x44\x44\x44\x44\x44\x44\x44\x44",
15204 		.rlen	= 32,
15205 	}, {
15206 		.key	= "\x27\x18\x28\x18\x28\x45\x90\x45"
15207 			  "\x23\x53\x60\x28\x74\x71\x35\x26"
15208 			  "\x31\x41\x59\x26\x53\x58\x97\x93"
15209 			  "\x23\x84\x62\x64\x33\x83\x27\x95",
15210 		.klen	= 32,
15211 		.iv	= "\x00\x00\x00\x00\x00\x00\x00\x00"
15212 			  "\x00\x00\x00\x00\x00\x00\x00\x00",
15213 		.input	= "\x57\xb5\xf8\x71\x6e\x6d\xdd\x82"
15214 			  "\x53\xd0\xed\x2d\x30\xc1\x20\xef"
15215 			  "\x70\x67\x5e\xff\x09\x70\xbb\xc1"
15216 			  "\x3a\x7b\x48\x26\xd9\x0b\xf4\x48"
15217 			  "\xbe\xce\xb1\xc7\xb2\x67\xc4\xa7"
15218 			  "\x76\xf8\x36\x30\xb7\xb4\x9a\xd9"
15219 			  "\xf5\x9d\xd0\x7b\xc1\x06\x96\x44"
15220 			  "\x19\xc5\x58\x84\x63\xb9\x12\x68"
15221 			  "\x68\xc7\xaa\x18\x98\xf2\x1f\x5c"
15222 			  "\x39\xa6\xd8\x32\x2b\xc3\x51\xfd"
15223 			  "\x74\x79\x2e\xb4\x44\xd7\x69\xc4"
15224 			  "\xfc\x29\xe6\xed\x26\x1e\xa6\x9d"
15225 			  "\x1c\xbe\x00\x0e\x7f\x3a\xca\xfb"
15226 			  "\x6d\x13\x65\xa0\xf9\x31\x12\xe2"
15227 			  "\x26\xd1\xec\x2b\x0a\x8b\x59\x99"
15228 			  "\xa7\x49\xa0\x0e\x09\x33\x85\x50"
15229 			  "\xc3\x23\xca\x7a\xdd\x13\x45\x5f"
15230 			  "\xde\x4c\xa7\xcb\x00\x8a\x66\x6f"
15231 			  "\xa2\xb6\xb1\x2e\xe1\xa0\x18\xf6"
15232 			  "\xad\xf3\xbd\xeb\xc7\xef\x55\x4f"
15233 			  "\x79\x91\x8d\x36\x13\x7b\xd0\x4a"
15234 			  "\x6c\x39\xfb\x53\xb8\x6f\x02\x51"
15235 			  "\xa5\x20\xac\x24\x1c\x73\x59\x73"
15236 			  "\x58\x61\x3a\x87\x58\xb3\x20\x56"
15237 			  "\x39\x06\x2b\x4d\xd3\x20\x2b\x89"
15238 			  "\x3f\xa2\xf0\x96\xeb\x7f\xa4\xcd"
15239 			  "\x11\xae\xbd\xcb\x3a\xb4\xd9\x91"
15240 			  "\x09\x35\x71\x50\x65\xac\x92\xe3"
15241 			  "\x7b\x32\xc0\x7a\xdd\xd4\xc3\x92"
15242 			  "\x6f\xeb\x79\xde\x6f\xd3\x25\xc9"
15243 			  "\xcd\x63\xf5\x1e\x7a\x3b\x26\x9d"
15244 			  "\x77\x04\x80\xa9\xbf\x38\xb5\xbd"
15245 			  "\xb8\x05\x07\xbd\xfd\xab\x7b\xf8"
15246 			  "\x2a\x26\xcc\x49\x14\x6d\x55\x01"
15247 			  "\x06\x94\xd8\xb2\x2d\x53\x83\x1b"
15248 			  "\x8f\xd4\xdd\x57\x12\x7e\x18\xba"
15249 			  "\x8e\xe2\x4d\x80\xef\x7e\x6b\x9d"
15250 			  "\x24\xa9\x60\xa4\x97\x85\x86\x2a"
15251 			  "\x01\x00\x09\xf1\xcb\x4a\x24\x1c"
15252 			  "\xd8\xf6\xe6\x5b\xe7\x5d\xf2\xc4"
15253 			  "\x97\x1c\x10\xc6\x4d\x66\x4f\x98"
15254 			  "\x87\x30\xac\xd5\xea\x73\x49\x10"
15255 			  "\x80\xea\xe5\x5f\x4d\x5f\x03\x33"
15256 			  "\x66\x02\x35\x3d\x60\x06\x36\x4f"
15257 			  "\x14\x1c\xd8\x07\x1f\x78\xd0\xf8"
15258 			  "\x4f\x6c\x62\x7c\x15\xa5\x7c\x28"
15259 			  "\x7c\xcc\xeb\x1f\xd1\x07\x90\x93"
15260 			  "\x7e\xc2\xa8\x3a\x80\xc0\xf5\x30"
15261 			  "\xcc\x75\xcf\x16\x26\xa9\x26\x3b"
15262 			  "\xe7\x68\x2f\x15\x21\x5b\xe4\x00"
15263 			  "\xbd\x48\x50\xcd\x75\x70\xc4\x62"
15264 			  "\xbb\x41\xfb\x89\x4a\x88\x3b\x3b"
15265 			  "\x51\x66\x02\x69\x04\x97\x36\xd4"
15266 			  "\x75\xae\x0b\xa3\x42\xf8\xca\x79"
15267 			  "\x8f\x93\xe9\xcc\x38\xbd\xd6\xd2"
15268 			  "\xf9\x70\x4e\xc3\x6a\x8e\x25\xbd"
15269 			  "\xea\x15\x5a\xa0\x85\x7e\x81\x0d"
15270 			  "\x03\xe7\x05\x39\xf5\x05\x26\xee"
15271 			  "\xec\xaa\x1f\x3d\xc9\x98\x76\x01"
15272 			  "\x2c\xf4\xfc\xa3\x88\x77\x38\xc4"
15273 			  "\x50\x65\x50\x6d\x04\x1f\xdf\x5a"
15274 			  "\xaa\xf2\x01\xa9\xc1\x8d\xee\xca"
15275 			  "\x47\x26\xef\x39\xb8\xb4\xf2\xd1"
15276 			  "\xd6\xbb\x1b\x2a\xc1\x34\x14\xcf",
15277 		.ilen	= 512,
15278 		.result	= "\x00\x01\x02\x03\x04\x05\x06\x07"
15279 			  "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
15280 			  "\x10\x11\x12\x13\x14\x15\x16\x17"
15281 			  "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
15282 			  "\x20\x21\x22\x23\x24\x25\x26\x27"
15283 			  "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
15284 			  "\x30\x31\x32\x33\x34\x35\x36\x37"
15285 			  "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
15286 			  "\x40\x41\x42\x43\x44\x45\x46\x47"
15287 			  "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
15288 			  "\x50\x51\x52\x53\x54\x55\x56\x57"
15289 			  "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
15290 			  "\x60\x61\x62\x63\x64\x65\x66\x67"
15291 			  "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
15292 			  "\x70\x71\x72\x73\x74\x75\x76\x77"
15293 			  "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
15294 			  "\x80\x81\x82\x83\x84\x85\x86\x87"
15295 			  "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
15296 			  "\x90\x91\x92\x93\x94\x95\x96\x97"
15297 			  "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
15298 			  "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
15299 			  "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
15300 			  "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
15301 			  "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
15302 			  "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
15303 			  "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
15304 			  "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
15305 			  "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
15306 			  "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
15307 			  "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
15308 			  "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
15309 			  "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
15310 			  "\x00\x01\x02\x03\x04\x05\x06\x07"
15311 			  "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
15312 			  "\x10\x11\x12\x13\x14\x15\x16\x17"
15313 			  "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
15314 			  "\x20\x21\x22\x23\x24\x25\x26\x27"
15315 			  "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
15316 			  "\x30\x31\x32\x33\x34\x35\x36\x37"
15317 			  "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
15318 			  "\x40\x41\x42\x43\x44\x45\x46\x47"
15319 			  "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
15320 			  "\x50\x51\x52\x53\x54\x55\x56\x57"
15321 			  "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
15322 			  "\x60\x61\x62\x63\x64\x65\x66\x67"
15323 			  "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
15324 			  "\x70\x71\x72\x73\x74\x75\x76\x77"
15325 			  "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
15326 			  "\x80\x81\x82\x83\x84\x85\x86\x87"
15327 			  "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
15328 			  "\x90\x91\x92\x93\x94\x95\x96\x97"
15329 			  "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
15330 			  "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
15331 			  "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
15332 			  "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
15333 			  "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
15334 			  "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
15335 			  "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
15336 			  "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
15337 			  "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
15338 			  "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
15339 			  "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
15340 			  "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
15341 			  "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
15342 		.rlen	= 512,
15343 	}, {
15344 		.key	= "\x27\x18\x28\x18\x28\x45\x90\x45"
15345 			  "\x23\x53\x60\x28\x74\x71\x35\x26"
15346 			  "\x62\x49\x77\x57\x24\x70\x93\x69"
15347 			  "\x99\x59\x57\x49\x66\x96\x76\x27"
15348 			  "\x31\x41\x59\x26\x53\x58\x97\x93"
15349 			  "\x23\x84\x62\x64\x33\x83\x27\x95"
15350 			  "\x02\x88\x41\x97\x16\x93\x99\x37"
15351 			  "\x51\x05\x82\x09\x74\x94\x45\x92",
15352 		.klen	= 64,
15353 		.iv	= "\xff\x00\x00\x00\x00\x00\x00\x00"
15354 			  "\x00\x00\x00\x00\x00\x00\x00\x00",
15355 		.input	= "\xc5\x85\x2a\x4b\x73\xe4\xf6\xf1"
15356 			  "\x7e\xf9\xf6\xe9\xa3\x73\x36\xcb"
15357 			  "\xaa\xb6\x22\xb0\x24\x6e\x3d\x73"
15358 			  "\x92\x99\xde\xd3\x76\xed\xcd\x63"
15359 			  "\x64\x3a\x22\x57\xc1\x43\x49\xd4"
15360 			  "\x79\x36\x31\x19\x62\xae\x10\x7e"
15361 			  "\x7d\xcf\x7a\xe2\x6b\xce\x27\xfa"
15362 			  "\xdc\x3d\xd9\x83\xd3\x42\x4c\xe0"
15363 			  "\x1b\xd6\x1d\x1a\x6f\xd2\x03\x00"
15364 			  "\xfc\x81\x99\x8a\x14\x62\xf5\x7e"
15365 			  "\x0d\xe7\x12\xe8\x17\x9d\x0b\xec"
15366 			  "\xe2\xf7\xc9\xa7\x63\xd1\x79\xb6"
15367 			  "\x62\x62\x37\xfe\x0a\x4c\x4a\x37"
15368 			  "\x70\xc7\x5e\x96\x5f\xbc\x8e\x9e"
15369 			  "\x85\x3c\x4f\x26\x64\x85\xbc\x68"
15370 			  "\xb0\xe0\x86\x5e\x26\x41\xce\x11"
15371 			  "\x50\xda\x97\x14\xe9\x9e\xc7\x6d"
15372 			  "\x3b\xdc\x43\xde\x2b\x27\x69\x7d"
15373 			  "\xfc\xb0\x28\xbd\x8f\xb1\xc6\x31"
15374 			  "\x14\x4d\xf0\x74\x37\xfd\x07\x25"
15375 			  "\x96\x55\xe5\xfc\x9e\x27\x2a\x74"
15376 			  "\x1b\x83\x4d\x15\x83\xac\x57\xa0"
15377 			  "\xac\xa5\xd0\x38\xef\x19\x56\x53"
15378 			  "\x25\x4b\xfc\xce\x04\x23\xe5\x6b"
15379 			  "\xf6\xc6\x6c\x32\x0b\xb3\x12\xc5"
15380 			  "\xed\x22\x34\x1c\x5d\xed\x17\x06"
15381 			  "\x36\xa3\xe6\x77\xb9\x97\x46\xb8"
15382 			  "\xe9\x3f\x7e\xc7\xbc\x13\x5c\xdc"
15383 			  "\x6e\x3f\x04\x5e\xd1\x59\xa5\x82"
15384 			  "\x35\x91\x3d\x1b\xe4\x97\x9f\x92"
15385 			  "\x1c\x5e\x5f\x6f\x41\xd4\x62\xa1"
15386 			  "\x8d\x39\xfc\x42\xfb\x38\x80\xb9"
15387 			  "\x0a\xe3\xcc\x6a\x93\xd9\x7a\xb1"
15388 			  "\xe9\x69\xaf\x0a\x6b\x75\x38\xa7"
15389 			  "\xa1\xbf\xf7\xda\x95\x93\x4b\x78"
15390 			  "\x19\xf5\x94\xf9\xd2\x00\x33\x37"
15391 			  "\xcf\xf5\x9e\x9c\xf3\xcc\xa6\xee"
15392 			  "\x42\xb2\x9e\x2c\x5f\x48\x23\x26"
15393 			  "\x15\x25\x17\x03\x3d\xfe\x2c\xfc"
15394 			  "\xeb\xba\xda\xe0\x00\x05\xb6\xa6"
15395 			  "\x07\xb3\xe8\x36\x5b\xec\x5b\xbf"
15396 			  "\xd6\x5b\x00\x74\xc6\x97\xf1\x6a"
15397 			  "\x49\xa1\xc3\xfa\x10\x52\xb9\x14"
15398 			  "\xad\xb7\x73\xf8\x78\x12\xc8\x59"
15399 			  "\x17\x80\x4c\x57\x39\xf1\x6d\x80"
15400 			  "\x25\x77\x0f\x5e\x7d\xf0\xaf\x21"
15401 			  "\xec\xce\xb7\xc8\x02\x8a\xed\x53"
15402 			  "\x2c\x25\x68\x2e\x1f\x85\x5e\x67"
15403 			  "\xd1\x07\x7a\x3a\x89\x08\xe0\x34"
15404 			  "\xdc\xdb\x26\xb4\x6b\x77\xfc\x40"
15405 			  "\x31\x15\x72\xa0\xf0\x73\xd9\x3b"
15406 			  "\xd5\xdb\xfe\xfc\x8f\xa9\x44\xa2"
15407 			  "\x09\x9f\xc6\x33\xe5\xe2\x88\xe8"
15408 			  "\xf3\xf0\x1a\xf4\xce\x12\x0f\xd6"
15409 			  "\xf7\x36\xe6\xa4\xf4\x7a\x10\x58"
15410 			  "\xcc\x1f\x48\x49\x65\x47\x75\xe9"
15411 			  "\x28\xe1\x65\x7b\xf2\xc4\xb5\x07"
15412 			  "\xf2\xec\x76\xd8\x8f\x09\xf3\x16"
15413 			  "\xa1\x51\x89\x3b\xeb\x96\x42\xac"
15414 			  "\x65\xe0\x67\x63\x29\xdc\xb4\x7d"
15415 			  "\xf2\x41\x51\x6a\xcb\xde\x3c\xfb"
15416 			  "\x66\x8d\x13\xca\xe0\x59\x2a\x00"
15417 			  "\xc9\x53\x4c\xe6\x9e\xe2\x73\xd5"
15418 			  "\x67\x19\xb2\xbd\x9a\x63\xd7\x5c",
15419 		.ilen	= 512,
15420 		.result	= "\x00\x01\x02\x03\x04\x05\x06\x07"
15421 			  "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
15422 			  "\x10\x11\x12\x13\x14\x15\x16\x17"
15423 			  "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
15424 			  "\x20\x21\x22\x23\x24\x25\x26\x27"
15425 			  "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
15426 			  "\x30\x31\x32\x33\x34\x35\x36\x37"
15427 			  "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
15428 			  "\x40\x41\x42\x43\x44\x45\x46\x47"
15429 			  "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
15430 			  "\x50\x51\x52\x53\x54\x55\x56\x57"
15431 			  "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
15432 			  "\x60\x61\x62\x63\x64\x65\x66\x67"
15433 			  "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
15434 			  "\x70\x71\x72\x73\x74\x75\x76\x77"
15435 			  "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
15436 			  "\x80\x81\x82\x83\x84\x85\x86\x87"
15437 			  "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
15438 			  "\x90\x91\x92\x93\x94\x95\x96\x97"
15439 			  "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
15440 			  "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
15441 			  "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
15442 			  "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
15443 			  "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
15444 			  "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
15445 			  "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
15446 			  "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
15447 			  "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
15448 			  "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
15449 			  "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
15450 			  "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
15451 			  "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
15452 			  "\x00\x01\x02\x03\x04\x05\x06\x07"
15453 			  "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
15454 			  "\x10\x11\x12\x13\x14\x15\x16\x17"
15455 			  "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
15456 			  "\x20\x21\x22\x23\x24\x25\x26\x27"
15457 			  "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
15458 			  "\x30\x31\x32\x33\x34\x35\x36\x37"
15459 			  "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
15460 			  "\x40\x41\x42\x43\x44\x45\x46\x47"
15461 			  "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
15462 			  "\x50\x51\x52\x53\x54\x55\x56\x57"
15463 			  "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
15464 			  "\x60\x61\x62\x63\x64\x65\x66\x67"
15465 			  "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
15466 			  "\x70\x71\x72\x73\x74\x75\x76\x77"
15467 			  "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
15468 			  "\x80\x81\x82\x83\x84\x85\x86\x87"
15469 			  "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
15470 			  "\x90\x91\x92\x93\x94\x95\x96\x97"
15471 			  "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
15472 			  "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
15473 			  "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
15474 			  "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
15475 			  "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
15476 			  "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
15477 			  "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
15478 			  "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
15479 			  "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
15480 			  "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
15481 			  "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
15482 			  "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
15483 			  "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
15484 		.rlen	= 512,
		/* NOTE(review): np/tap presumably split the 512-byte buffer
		 * into 3 scatterlist segments (492 + 4 + 16) — confirm
		 * against the tap handling in testmgr.c. */
15485 		.also_non_np = 1,
15486 		.np	= 3,
15487 		.tap	= { 512 - 20, 4, 16 },
15488 	}
15489 };
15490
/*
 * Speck64 single-block (ECB) encryption test vectors, one per
 * supported key size: Speck64/96 (12-byte key) and Speck64/128
 * (16-byte key). Each entry encrypts one 8-byte block.
 */
15491 static const struct cipher_testvec speck64_enc_tv_template[] = {
15492 	{ /* Speck64/96 */
15493 		.key	= "\x00\x01\x02\x03\x08\x09\x0a\x0b"
15494 			  "\x10\x11\x12\x13",
15495 		.klen	= 12,
15496 		.input	= "\x65\x61\x6e\x73\x20\x46\x61\x74",
15497 		.ilen	= 8,
15498 		.result	= "\x6c\x94\x75\x41\xec\x52\x79\x9f",
15499 		.rlen	= 8,
15500 	}, { /* Speck64/128 */
15501 		.key	= "\x00\x01\x02\x03\x08\x09\x0a\x0b"
15502 			  "\x10\x11\x12\x13\x18\x19\x1a\x1b",
15503 		.klen	= 16,
15504 		.input	= "\x2d\x43\x75\x74\x74\x65\x72\x3b",
15505 		.ilen	= 8,
15506 		.result	= "\x8b\x02\x4e\x45\x48\xa5\x6f\x8c",
15507 		.rlen	= 8,
15508 	},
15509 };
15510
/*
 * Speck64 single-block (ECB) decryption test vectors: the inverse of
 * speck64_enc_tv_template — same keys, with .input/.result swapped.
 */
15511 static const struct cipher_testvec speck64_dec_tv_template[] = {
15512 	{ /* Speck64/96 */
15513 		.key	= "\x00\x01\x02\x03\x08\x09\x0a\x0b"
15514 			  "\x10\x11\x12\x13",
15515 		.klen	= 12,
15516 		.input	= "\x6c\x94\x75\x41\xec\x52\x79\x9f",
15517 		.ilen	= 8,
15518 		.result	= "\x65\x61\x6e\x73\x20\x46\x61\x74",
15519 		.rlen	= 8,
15520 	}, { /* Speck64/128 */
15521 		.key	= "\x00\x01\x02\x03\x08\x09\x0a\x0b"
15522 			  "\x10\x11\x12\x13\x18\x19\x1a\x1b",
15523 		.klen	= 16,
15524 		.input	= "\x8b\x02\x4e\x45\x48\xa5\x6f\x8c",
15525 		.ilen	= 8,
15526 		.result	= "\x2d\x43\x75\x74\x74\x65\x72\x3b",
15527 		.rlen	= 8,
15528 	},
15529 };
15530
15531/*
15532 * Speck64-XTS test vectors, taken from the AES-XTS test vectors with the result
15533 * recomputed with Speck64 as the cipher, and key lengths adjusted
15534 */
15535
/*
 * Speck64-XTS encryption test vectors (see the comment above: derived
 * from the AES-XTS vectors with the result recomputed using Speck64
 * and key lengths adjusted — XTS keys here are 2x the cipher key
 * length, so klen is 24 for Speck64/96 and 32 for Speck64/128).
 * Do not modify the data bytes; any change breaks the self-test.
 */
15536 static const struct cipher_testvec speck64_xts_enc_tv_template[] = {
15537 	{
15538 		.key	= "\x00\x00\x00\x00\x00\x00\x00\x00"
15539 			  "\x00\x00\x00\x00\x00\x00\x00\x00"
15540 			  "\x00\x00\x00\x00\x00\x00\x00\x00",
15541 		.klen	= 24,
15542 		.iv	= "\x00\x00\x00\x00\x00\x00\x00\x00"
15543 			  "\x00\x00\x00\x00\x00\x00\x00\x00",
15544 		.input	= "\x00\x00\x00\x00\x00\x00\x00\x00"
15545 			  "\x00\x00\x00\x00\x00\x00\x00\x00"
15546 			  "\x00\x00\x00\x00\x00\x00\x00\x00"
15547 			  "\x00\x00\x00\x00\x00\x00\x00\x00",
15548 		.ilen	= 32,
15549 		.result	= "\x84\xaf\x54\x07\x19\xd4\x7c\xa6"
15550 			  "\xe4\xfe\xdf\xc4\x1f\x34\xc3\xc2"
15551 			  "\x80\xf5\x72\xe7\xcd\xf0\x99\x22"
15552 			  "\x35\xa7\x2f\x06\xef\xdc\x51\xaa",
15553 		.rlen	= 32,
15554 	}, {
15555 		.key	= "\x11\x11\x11\x11\x11\x11\x11\x11"
15556 			  "\x11\x11\x11\x11\x11\x11\x11\x11"
15557 			  "\x22\x22\x22\x22\x22\x22\x22\x22",
15558 		.klen	= 24,
15559 		.iv	= "\x33\x33\x33\x33\x33\x00\x00\x00"
15560 			  "\x00\x00\x00\x00\x00\x00\x00\x00",
15561 		.input	= "\x44\x44\x44\x44\x44\x44\x44\x44"
15562 			  "\x44\x44\x44\x44\x44\x44\x44\x44"
15563 			  "\x44\x44\x44\x44\x44\x44\x44\x44"
15564 			  "\x44\x44\x44\x44\x44\x44\x44\x44",
15565 		.ilen	= 32,
15566 		.result	= "\x12\x56\x73\xcd\x15\x87\xa8\x59"
15567 			  "\xcf\x84\xae\xd9\x1c\x66\xd6\x9f"
15568 			  "\xb3\x12\x69\x7e\x36\xeb\x52\xff"
15569 			  "\x62\xdd\xba\x90\xb3\xe1\xee\x99",
15570 		.rlen	= 32,
15571 	}, {
15572 		.key	= "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8"
15573 			  "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0"
15574 			  "\x22\x22\x22\x22\x22\x22\x22\x22",
15575 		.klen	= 24,
15576 		.iv	= "\x33\x33\x33\x33\x33\x00\x00\x00"
15577 			  "\x00\x00\x00\x00\x00\x00\x00\x00",
15578 		.input	= "\x44\x44\x44\x44\x44\x44\x44\x44"
15579 			  "\x44\x44\x44\x44\x44\x44\x44\x44"
15580 			  "\x44\x44\x44\x44\x44\x44\x44\x44"
15581 			  "\x44\x44\x44\x44\x44\x44\x44\x44",
15582 		.ilen	= 32,
15583 		.result	= "\x15\x1b\xe4\x2c\xa2\x5a\x2d\x2c"
15584 			  "\x27\x36\xc0\xbf\x5d\xea\x36\x37"
15585 			  "\x2d\x1a\x88\xbc\x66\xb5\xd0\x0b"
15586 			  "\xa1\xbc\x19\xb2\x0f\x3b\x75\x34",
15587 		.rlen	= 32,
15588 	}, {
15589 		.key	= "\x27\x18\x28\x18\x28\x45\x90\x45"
15590 			  "\x23\x53\x60\x28\x74\x71\x35\x26"
15591 			  "\x31\x41\x59\x26\x53\x58\x97\x93",
15592 		.klen	= 24,
15593 		.iv	= "\x00\x00\x00\x00\x00\x00\x00\x00"
15594 			  "\x00\x00\x00\x00\x00\x00\x00\x00",
15595 		.input	= "\x00\x01\x02\x03\x04\x05\x06\x07"
15596 			  "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
15597 			  "\x10\x11\x12\x13\x14\x15\x16\x17"
15598 			  "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
15599 			  "\x20\x21\x22\x23\x24\x25\x26\x27"
15600 			  "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
15601 			  "\x30\x31\x32\x33\x34\x35\x36\x37"
15602 			  "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
15603 			  "\x40\x41\x42\x43\x44\x45\x46\x47"
15604 			  "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
15605 			  "\x50\x51\x52\x53\x54\x55\x56\x57"
15606 			  "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
15607 			  "\x60\x61\x62\x63\x64\x65\x66\x67"
15608 			  "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
15609 			  "\x70\x71\x72\x73\x74\x75\x76\x77"
15610 			  "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
15611 			  "\x80\x81\x82\x83\x84\x85\x86\x87"
15612 			  "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
15613 			  "\x90\x91\x92\x93\x94\x95\x96\x97"
15614 			  "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
15615 			  "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
15616 			  "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
15617 			  "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
15618 			  "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
15619 			  "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
15620 			  "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
15621 			  "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
15622 			  "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
15623 			  "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
15624 			  "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
15625 			  "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
15626 			  "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
15627 			  "\x00\x01\x02\x03\x04\x05\x06\x07"
15628 			  "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
15629 			  "\x10\x11\x12\x13\x14\x15\x16\x17"
15630 			  "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
15631 			  "\x20\x21\x22\x23\x24\x25\x26\x27"
15632 			  "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
15633 			  "\x30\x31\x32\x33\x34\x35\x36\x37"
15634 			  "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
15635 			  "\x40\x41\x42\x43\x44\x45\x46\x47"
15636 			  "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
15637 			  "\x50\x51\x52\x53\x54\x55\x56\x57"
15638 			  "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
15639 			  "\x60\x61\x62\x63\x64\x65\x66\x67"
15640 			  "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
15641 			  "\x70\x71\x72\x73\x74\x75\x76\x77"
15642 			  "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
15643 			  "\x80\x81\x82\x83\x84\x85\x86\x87"
15644 			  "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
15645 			  "\x90\x91\x92\x93\x94\x95\x96\x97"
15646 			  "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
15647 			  "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
15648 			  "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
15649 			  "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
15650 			  "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
15651 			  "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
15652 			  "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
15653 			  "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
15654 			  "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
15655 			  "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
15656 			  "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
15657 			  "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
15658 			  "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
15659 		.ilen	= 512,
15660 		.result	= "\xaf\xa1\x81\xa6\x32\xbb\x15\x8e"
15661 			  "\xf8\x95\x2e\xd3\xe6\xee\x7e\x09"
15662 			  "\x0c\x1a\xf5\x02\x97\x8b\xe3\xb3"
15663 			  "\x11\xc7\x39\x96\xd0\x95\xf4\x56"
15664 			  "\xf4\xdd\x03\x38\x01\x44\x2c\xcf"
15665 			  "\x88\xae\x8e\x3c\xcd\xe7\xaa\x66"
15666 			  "\xfe\x3d\xc6\xfb\x01\x23\x51\x43"
15667 			  "\xd5\xd2\x13\x86\x94\x34\xe9\x62"
15668 			  "\xf9\x89\xe3\xd1\x7b\xbe\xf8\xef"
15669 			  "\x76\x35\x04\x3f\xdb\x23\x9d\x0b"
15670 			  "\x85\x42\xb9\x02\xd6\xcc\xdb\x96"
15671 			  "\xa7\x6b\x27\xb6\xd4\x45\x8f\x7d"
15672 			  "\xae\xd2\x04\xd5\xda\xc1\x7e\x24"
15673 			  "\x8c\x73\xbe\x48\x7e\xcf\x65\x28"
15674 			  "\x29\xe5\xbe\x54\x30\xcb\x46\x95"
15675 			  "\x4f\x2e\x8a\x36\xc8\x27\xc5\xbe"
15676 			  "\xd0\x1a\xaf\xab\x26\xcd\x9e\x69"
15677 			  "\xa1\x09\x95\x71\x26\xe9\xc4\xdf"
15678 			  "\xe6\x31\xc3\x46\xda\xaf\x0b\x41"
15679 			  "\x1f\xab\xb1\x8e\xd6\xfc\x0b\xb3"
15680 			  "\x82\xc0\x37\x27\xfc\x91\xa7\x05"
15681 			  "\xfb\xc5\xdc\x2b\x74\x96\x48\x43"
15682 			  "\x5d\x9c\x19\x0f\x60\x63\x3a\x1f"
15683 			  "\x6f\xf0\x03\xbe\x4d\xfd\xc8\x4a"
15684 			  "\xc6\xa4\x81\x6d\xc3\x12\x2a\x5c"
15685 			  "\x07\xff\xf3\x72\x74\x48\xb5\x40"
15686 			  "\x50\xb5\xdd\x90\x43\x31\x18\x15"
15687 			  "\x7b\xf2\xa6\xdb\x83\xc8\x4b\x4a"
15688 			  "\x29\x93\x90\x8b\xda\x07\xf0\x35"
15689 			  "\x6d\x90\x88\x09\x4e\x83\xf5\x5b"
15690 			  "\x94\x12\xbb\x33\x27\x1d\x3f\x23"
15691 			  "\x51\xa8\x7c\x07\xa2\xae\x77\xa6"
15692 			  "\x50\xfd\xcc\xc0\x4f\x80\x7a\x9f"
15693 			  "\x66\xdd\xcd\x75\x24\x8b\x33\xf7"
15694 			  "\x20\xdb\x83\x9b\x4f\x11\x63\x6e"
15695 			  "\xcf\x37\xef\xc9\x11\x01\x5c\x45"
15696 			  "\x32\x99\x7c\x3c\x9e\x42\x89\xe3"
15697 			  "\x70\x6d\x15\x9f\xb1\xe6\xb6\x05"
15698 			  "\xfe\x0c\xb9\x49\x2d\x90\x6d\xcc"
15699 			  "\x5d\x3f\xc1\xfe\x89\x0a\x2e\x2d"
15700 			  "\xa0\xa8\x89\x3b\x73\x39\xa5\x94"
15701 			  "\x4c\xa4\xa6\xbb\xa7\x14\x46\x89"
15702 			  "\x10\xff\xaf\xef\xca\xdd\x4f\x80"
15703 			  "\xb3\xdf\x3b\xab\xd4\xe5\x5a\xc7"
15704 			  "\x33\xca\x00\x8b\x8b\x3f\xea\xec"
15705 			  "\x68\x8a\xc2\x6d\xfd\xd4\x67\x0f"
15706 			  "\x22\x31\xe1\x0e\xfe\x5a\x04\xd5"
15707 			  "\x64\xa3\xf1\x1a\x76\x28\xcc\x35"
15708 			  "\x36\xa7\x0a\x74\xf7\x1c\x44\x9b"
15709 			  "\xc7\x1b\x53\x17\x02\xea\xd1\xad"
15710 			  "\x13\x51\x73\xc0\xa0\xb2\x05\x32"
15711 			  "\xa8\xa2\x37\x2e\xe1\x7a\x3a\x19"
15712 			  "\x26\xb4\x6c\x62\x5d\xb3\x1a\x1d"
15713 			  "\x59\xda\xee\x1a\x22\x18\xda\x0d"
15714 			  "\x88\x0f\x55\x8b\x72\x62\xfd\xc1"
15715 			  "\x69\x13\xcd\x0d\x5f\xc1\x09\x52"
15716 			  "\xee\xd6\xe3\x84\x4d\xee\xf6\x88"
15717 			  "\xaf\x83\xdc\x76\xf4\xc0\x93\x3f"
15718 			  "\x4a\x75\x2f\xb0\x0b\x3e\xc4\x54"
15719 			  "\x7d\x69\x8d\x00\x62\x77\x0d\x14"
15720 			  "\xbe\x7c\xa6\x7d\xc5\x24\x4f\xf3"
15721 			  "\x50\xf7\x5f\xf4\xc2\xca\x41\x97"
15722 			  "\x37\xbe\x75\x74\xcd\xf0\x75\x6e"
15723 			  "\x25\x23\x94\xbd\xda\x8d\xb0\xd4",
15724 		.rlen	= 512,
15725 	}, {
15726 		.key	= "\x27\x18\x28\x18\x28\x45\x90\x45"
15727 			  "\x23\x53\x60\x28\x74\x71\x35\x26"
15728 			  "\x62\x49\x77\x57\x24\x70\x93\x69"
15729 			  "\x99\x59\x57\x49\x66\x96\x76\x27",
15730 		.klen	= 32,
15731 		.iv	= "\xff\x00\x00\x00\x00\x00\x00\x00"
15732 			  "\x00\x00\x00\x00\x00\x00\x00\x00",
15733 		.input	= "\x00\x01\x02\x03\x04\x05\x06\x07"
15734 			  "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
15735 			  "\x10\x11\x12\x13\x14\x15\x16\x17"
15736 			  "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
15737 			  "\x20\x21\x22\x23\x24\x25\x26\x27"
15738 			  "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
15739 			  "\x30\x31\x32\x33\x34\x35\x36\x37"
15740 			  "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
15741 			  "\x40\x41\x42\x43\x44\x45\x46\x47"
15742 			  "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
15743 			  "\x50\x51\x52\x53\x54\x55\x56\x57"
15744 			  "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
15745 			  "\x60\x61\x62\x63\x64\x65\x66\x67"
15746 			  "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
15747 			  "\x70\x71\x72\x73\x74\x75\x76\x77"
15748 			  "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
15749 			  "\x80\x81\x82\x83\x84\x85\x86\x87"
15750 			  "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
15751 			  "\x90\x91\x92\x93\x94\x95\x96\x97"
15752 			  "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
15753 			  "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
15754 			  "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
15755 			  "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
15756 			  "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
15757 			  "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
15758 			  "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
15759 			  "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
15760 			  "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
15761 			  "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
15762 			  "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
15763 			  "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
15764 			  "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
15765 			  "\x00\x01\x02\x03\x04\x05\x06\x07"
15766 			  "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
15767 			  "\x10\x11\x12\x13\x14\x15\x16\x17"
15768 			  "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
15769 			  "\x20\x21\x22\x23\x24\x25\x26\x27"
15770 			  "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
15771 			  "\x30\x31\x32\x33\x34\x35\x36\x37"
15772 			  "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
15773 			  "\x40\x41\x42\x43\x44\x45\x46\x47"
15774 			  "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
15775 			  "\x50\x51\x52\x53\x54\x55\x56\x57"
15776 			  "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
15777 			  "\x60\x61\x62\x63\x64\x65\x66\x67"
15778 			  "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
15779 			  "\x70\x71\x72\x73\x74\x75\x76\x77"
15780 			  "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
15781 			  "\x80\x81\x82\x83\x84\x85\x86\x87"
15782 			  "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
15783 			  "\x90\x91\x92\x93\x94\x95\x96\x97"
15784 			  "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
15785 			  "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
15786 			  "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
15787 			  "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
15788 			  "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
15789 			  "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
15790 			  "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
15791 			  "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
15792 			  "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
15793 			  "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
15794 			  "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
15795 			  "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
15796 			  "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
15797 		.ilen	= 512,
15798 		.result	= "\x55\xed\x71\xd3\x02\x8e\x15\x3b"
15799 			  "\xc6\x71\x29\x2d\x3e\x89\x9f\x59"
15800 			  "\x68\x6a\xcc\x8a\x56\x97\xf3\x95"
15801 			  "\x4e\x51\x08\xda\x2a\xf8\x6f\x3c"
15802 			  "\x78\x16\xea\x80\xdb\x33\x75\x94"
15803 			  "\xf9\x29\xc4\x2b\x76\x75\x97\xc7"
15804 			  "\xf2\x98\x2c\xf9\xff\xc8\xd5\x2b"
15805 			  "\x18\xf1\xaf\xcf\x7c\xc5\x0b\xee"
15806 			  "\xad\x3c\x76\x7c\xe6\x27\xa2\x2a"
15807 			  "\xe4\x66\xe1\xab\xa2\x39\xfc\x7c"
15808 			  "\xf5\xec\x32\x74\xa3\xb8\x03\x88"
15809 			  "\x52\xfc\x2e\x56\x3f\xa1\xf0\x9f"
15810 			  "\x84\x5e\x46\xed\x20\x89\xb6\x44"
15811 			  "\x8d\xd0\xed\x54\x47\x16\xbe\x95"
15812 			  "\x8a\xb3\x6b\x72\xc4\x32\x52\x13"
15813 			  "\x1b\xb0\x82\xbe\xac\xf9\x70\xa6"
15814 			  "\x44\x18\xdd\x8c\x6e\xca\x6e\x45"
15815 			  "\x8f\x1e\x10\x07\x57\x25\x98\x7b"
15816 			  "\x17\x8c\x78\xdd\x80\xa7\xd9\xd8"
15817 			  "\x63\xaf\xb9\x67\x57\xfd\xbc\xdb"
15818 			  "\x44\xe9\xc5\x65\xd1\xc7\x3b\xff"
15819 			  "\x20\xa0\x80\x1a\xc3\x9a\xad\x5e"
15820 			  "\x5d\x3b\xd3\x07\xd9\xf5\xfd\x3d"
15821 			  "\x4a\x8b\xa8\xd2\x6e\x7a\x51\x65"
15822 			  "\x6c\x8e\x95\xe0\x45\xc9\x5f\x4a"
15823 			  "\x09\x3c\x3d\x71\x7f\x0c\x84\x2a"
15824 			  "\xc8\x48\x52\x1a\xc2\xd5\xd6\x78"
15825 			  "\x92\x1e\xa0\x90\x2e\xea\xf0\xf3"
15826 			  "\xdc\x0f\xb1\xaf\x0d\x9b\x06\x2e"
15827 			  "\x35\x10\x30\x82\x0d\xe7\xc5\x9b"
15828 			  "\xde\x44\x18\xbd\x9f\xd1\x45\xa9"
15829 			  "\x7b\x7a\x4a\xad\x35\x65\x27\xca"
15830 			  "\xb2\xc3\xd4\x9b\x71\x86\x70\xee"
15831 			  "\xf1\x89\x3b\x85\x4b\x5b\xaa\xaf"
15832 			  "\xfc\x42\xc8\x31\x59\xbe\x16\x60"
15833 			  "\x4f\xf9\xfa\x12\xea\xd0\xa7\x14"
15834 			  "\xf0\x7a\xf3\xd5\x8d\xbd\x81\xef"
15835 			  "\x52\x7f\x29\x51\x94\x20\x67\x3c"
15836 			  "\xd1\xaf\x77\x9f\x22\x5a\x4e\x63"
15837 			  "\xe7\xff\x73\x25\xd1\xdd\x96\x8a"
15838 			  "\x98\x52\x6d\xf3\xac\x3e\xf2\x18"
15839 			  "\x6d\xf6\x0a\x29\xa6\x34\x3d\xed"
15840 			  "\xe3\x27\x0d\x9d\x0a\x02\x44\x7e"
15841 			  "\x5a\x7e\x67\x0f\x0a\x9e\xd6\xad"
15842 			  "\x91\xe6\x4d\x81\x8c\x5c\x59\xaa"
15843 			  "\xfb\xeb\x56\x53\xd2\x7d\x4c\x81"
15844 			  "\x65\x53\x0f\x41\x11\xbd\x98\x99"
15845 			  "\xf9\xc6\xfa\x51\x2e\xa3\xdd\x8d"
15846 			  "\x84\x98\xf9\x34\xed\x33\x2a\x1f"
15847 			  "\x82\xed\xc1\x73\x98\xd3\x02\xdc"
15848 			  "\xe6\xc2\x33\x1d\xa2\xb4\xca\x76"
15849 			  "\x63\x51\x34\x9d\x96\x12\xae\xce"
15850 			  "\x83\xc9\x76\x5e\xa4\x1b\x53\x37"
15851 			  "\x17\xd5\xc0\x80\x1d\x62\xf8\x3d"
15852 			  "\x54\x27\x74\xbb\x10\x86\x57\x46"
15853 			  "\x68\xe1\xed\x14\xe7\x9d\xfc\x84"
15854 			  "\x47\xbc\xc2\xf8\x19\x4b\x99\xcf"
15855 			  "\x7a\xe9\xc4\xb8\x8c\x82\x72\x4d"
15856 			  "\x7b\x4f\x38\x55\x36\x71\x64\xc1"
15857 			  "\xfc\x5c\x75\x52\x33\x02\x18\xf8"
15858 			  "\x17\xe1\x2b\xc2\x43\x39\xbd\x76"
15859 			  "\x9b\x63\x76\x32\x2f\x19\x72\x10"
15860 			  "\x9f\x21\x0c\xf1\x66\x50\x7f\xa5"
15861 			  "\x0d\x1f\x46\xe0\xba\xd3\x2f\x3c",
15862 		.rlen	= 512,
		/* NOTE(review): np/tap presumably split the 512-byte buffer
		 * into 3 scatterlist segments (492 + 4 + 16) — confirm
		 * against the tap handling in testmgr.c. */
15863 		.also_non_np = 1,
15864 		.np	= 3,
15865 		.tap	= { 512 - 20, 4, 16 },
15866 	}
15867 };
15868
15869static const struct cipher_testvec speck64_xts_dec_tv_template[] = {
15870 {
15871 .key = "\x00\x00\x00\x00\x00\x00\x00\x00"
15872 "\x00\x00\x00\x00\x00\x00\x00\x00"
15873 "\x00\x00\x00\x00\x00\x00\x00\x00",
15874 .klen = 24,
15875 .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
15876 "\x00\x00\x00\x00\x00\x00\x00\x00",
15877 .input = "\x84\xaf\x54\x07\x19\xd4\x7c\xa6"
15878 "\xe4\xfe\xdf\xc4\x1f\x34\xc3\xc2"
15879 "\x80\xf5\x72\xe7\xcd\xf0\x99\x22"
15880 "\x35\xa7\x2f\x06\xef\xdc\x51\xaa",
15881 .ilen = 32,
15882 .result = "\x00\x00\x00\x00\x00\x00\x00\x00"
15883 "\x00\x00\x00\x00\x00\x00\x00\x00"
15884 "\x00\x00\x00\x00\x00\x00\x00\x00"
15885 "\x00\x00\x00\x00\x00\x00\x00\x00",
15886 .rlen = 32,
15887 }, {
15888 .key = "\x11\x11\x11\x11\x11\x11\x11\x11"
15889 "\x11\x11\x11\x11\x11\x11\x11\x11"
15890 "\x22\x22\x22\x22\x22\x22\x22\x22",
15891 .klen = 24,
15892 .iv = "\x33\x33\x33\x33\x33\x00\x00\x00"
15893 "\x00\x00\x00\x00\x00\x00\x00\x00",
15894 .input = "\x12\x56\x73\xcd\x15\x87\xa8\x59"
15895 "\xcf\x84\xae\xd9\x1c\x66\xd6\x9f"
15896 "\xb3\x12\x69\x7e\x36\xeb\x52\xff"
15897 "\x62\xdd\xba\x90\xb3\xe1\xee\x99",
15898 .ilen = 32,
15899 .result = "\x44\x44\x44\x44\x44\x44\x44\x44"
15900 "\x44\x44\x44\x44\x44\x44\x44\x44"
15901 "\x44\x44\x44\x44\x44\x44\x44\x44"
15902 "\x44\x44\x44\x44\x44\x44\x44\x44",
15903 .rlen = 32,
15904 }, {
15905 .key = "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8"
15906 "\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0"
15907 "\x22\x22\x22\x22\x22\x22\x22\x22",
15908 .klen = 24,
15909 .iv = "\x33\x33\x33\x33\x33\x00\x00\x00"
15910 "\x00\x00\x00\x00\x00\x00\x00\x00",
15911 .input = "\x15\x1b\xe4\x2c\xa2\x5a\x2d\x2c"
15912 "\x27\x36\xc0\xbf\x5d\xea\x36\x37"
15913 "\x2d\x1a\x88\xbc\x66\xb5\xd0\x0b"
15914 "\xa1\xbc\x19\xb2\x0f\x3b\x75\x34",
15915 .ilen = 32,
15916 .result = "\x44\x44\x44\x44\x44\x44\x44\x44"
15917 "\x44\x44\x44\x44\x44\x44\x44\x44"
15918 "\x44\x44\x44\x44\x44\x44\x44\x44"
15919 "\x44\x44\x44\x44\x44\x44\x44\x44",
15920 .rlen = 32,
15921 }, {
15922 .key = "\x27\x18\x28\x18\x28\x45\x90\x45"
15923 "\x23\x53\x60\x28\x74\x71\x35\x26"
15924 "\x31\x41\x59\x26\x53\x58\x97\x93",
15925 .klen = 24,
15926 .iv = "\x00\x00\x00\x00\x00\x00\x00\x00"
15927 "\x00\x00\x00\x00\x00\x00\x00\x00",
15928 .input = "\xaf\xa1\x81\xa6\x32\xbb\x15\x8e"
15929 "\xf8\x95\x2e\xd3\xe6\xee\x7e\x09"
15930 "\x0c\x1a\xf5\x02\x97\x8b\xe3\xb3"
15931 "\x11\xc7\x39\x96\xd0\x95\xf4\x56"
15932 "\xf4\xdd\x03\x38\x01\x44\x2c\xcf"
15933 "\x88\xae\x8e\x3c\xcd\xe7\xaa\x66"
15934 "\xfe\x3d\xc6\xfb\x01\x23\x51\x43"
15935 "\xd5\xd2\x13\x86\x94\x34\xe9\x62"
15936 "\xf9\x89\xe3\xd1\x7b\xbe\xf8\xef"
15937 "\x76\x35\x04\x3f\xdb\x23\x9d\x0b"
15938 "\x85\x42\xb9\x02\xd6\xcc\xdb\x96"
15939 "\xa7\x6b\x27\xb6\xd4\x45\x8f\x7d"
15940 "\xae\xd2\x04\xd5\xda\xc1\x7e\x24"
15941 "\x8c\x73\xbe\x48\x7e\xcf\x65\x28"
15942 "\x29\xe5\xbe\x54\x30\xcb\x46\x95"
15943 "\x4f\x2e\x8a\x36\xc8\x27\xc5\xbe"
15944 "\xd0\x1a\xaf\xab\x26\xcd\x9e\x69"
15945 "\xa1\x09\x95\x71\x26\xe9\xc4\xdf"
15946 "\xe6\x31\xc3\x46\xda\xaf\x0b\x41"
15947 "\x1f\xab\xb1\x8e\xd6\xfc\x0b\xb3"
15948 "\x82\xc0\x37\x27\xfc\x91\xa7\x05"
15949 "\xfb\xc5\xdc\x2b\x74\x96\x48\x43"
15950 "\x5d\x9c\x19\x0f\x60\x63\x3a\x1f"
15951 "\x6f\xf0\x03\xbe\x4d\xfd\xc8\x4a"
15952 "\xc6\xa4\x81\x6d\xc3\x12\x2a\x5c"
15953 "\x07\xff\xf3\x72\x74\x48\xb5\x40"
15954 "\x50\xb5\xdd\x90\x43\x31\x18\x15"
15955 "\x7b\xf2\xa6\xdb\x83\xc8\x4b\x4a"
15956 "\x29\x93\x90\x8b\xda\x07\xf0\x35"
15957 "\x6d\x90\x88\x09\x4e\x83\xf5\x5b"
15958 "\x94\x12\xbb\x33\x27\x1d\x3f\x23"
15959 "\x51\xa8\x7c\x07\xa2\xae\x77\xa6"
15960 "\x50\xfd\xcc\xc0\x4f\x80\x7a\x9f"
15961 "\x66\xdd\xcd\x75\x24\x8b\x33\xf7"
15962 "\x20\xdb\x83\x9b\x4f\x11\x63\x6e"
15963 "\xcf\x37\xef\xc9\x11\x01\x5c\x45"
15964 "\x32\x99\x7c\x3c\x9e\x42\x89\xe3"
15965 "\x70\x6d\x15\x9f\xb1\xe6\xb6\x05"
15966 "\xfe\x0c\xb9\x49\x2d\x90\x6d\xcc"
15967 "\x5d\x3f\xc1\xfe\x89\x0a\x2e\x2d"
15968 "\xa0\xa8\x89\x3b\x73\x39\xa5\x94"
15969 "\x4c\xa4\xa6\xbb\xa7\x14\x46\x89"
15970 "\x10\xff\xaf\xef\xca\xdd\x4f\x80"
15971 "\xb3\xdf\x3b\xab\xd4\xe5\x5a\xc7"
15972 "\x33\xca\x00\x8b\x8b\x3f\xea\xec"
15973 "\x68\x8a\xc2\x6d\xfd\xd4\x67\x0f"
15974 "\x22\x31\xe1\x0e\xfe\x5a\x04\xd5"
15975 "\x64\xa3\xf1\x1a\x76\x28\xcc\x35"
15976 "\x36\xa7\x0a\x74\xf7\x1c\x44\x9b"
15977 "\xc7\x1b\x53\x17\x02\xea\xd1\xad"
15978 "\x13\x51\x73\xc0\xa0\xb2\x05\x32"
15979 "\xa8\xa2\x37\x2e\xe1\x7a\x3a\x19"
15980 "\x26\xb4\x6c\x62\x5d\xb3\x1a\x1d"
15981 "\x59\xda\xee\x1a\x22\x18\xda\x0d"
15982 "\x88\x0f\x55\x8b\x72\x62\xfd\xc1"
15983 "\x69\x13\xcd\x0d\x5f\xc1\x09\x52"
15984 "\xee\xd6\xe3\x84\x4d\xee\xf6\x88"
15985 "\xaf\x83\xdc\x76\xf4\xc0\x93\x3f"
15986 "\x4a\x75\x2f\xb0\x0b\x3e\xc4\x54"
15987 "\x7d\x69\x8d\x00\x62\x77\x0d\x14"
15988 "\xbe\x7c\xa6\x7d\xc5\x24\x4f\xf3"
15989 "\x50\xf7\x5f\xf4\xc2\xca\x41\x97"
15990 "\x37\xbe\x75\x74\xcd\xf0\x75\x6e"
15991 "\x25\x23\x94\xbd\xda\x8d\xb0\xd4",
15992 .ilen = 512,
15993 .result = "\x00\x01\x02\x03\x04\x05\x06\x07"
15994 "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
15995 "\x10\x11\x12\x13\x14\x15\x16\x17"
15996 "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
15997 "\x20\x21\x22\x23\x24\x25\x26\x27"
15998 "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
15999 "\x30\x31\x32\x33\x34\x35\x36\x37"
16000 "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
16001 "\x40\x41\x42\x43\x44\x45\x46\x47"
16002 "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
16003 "\x50\x51\x52\x53\x54\x55\x56\x57"
16004 "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
16005 "\x60\x61\x62\x63\x64\x65\x66\x67"
16006 "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
16007 "\x70\x71\x72\x73\x74\x75\x76\x77"
16008 "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
16009 "\x80\x81\x82\x83\x84\x85\x86\x87"
16010 "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
16011 "\x90\x91\x92\x93\x94\x95\x96\x97"
16012 "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
16013 "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
16014 "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
16015 "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
16016 "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
16017 "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
16018 "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
16019 "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
16020 "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
16021 "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
16022 "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
16023 "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
16024 "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
16025 "\x00\x01\x02\x03\x04\x05\x06\x07"
16026 "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
16027 "\x10\x11\x12\x13\x14\x15\x16\x17"
16028 "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
16029 "\x20\x21\x22\x23\x24\x25\x26\x27"
16030 "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
16031 "\x30\x31\x32\x33\x34\x35\x36\x37"
16032 "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
16033 "\x40\x41\x42\x43\x44\x45\x46\x47"
16034 "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
16035 "\x50\x51\x52\x53\x54\x55\x56\x57"
16036 "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
16037 "\x60\x61\x62\x63\x64\x65\x66\x67"
16038 "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
16039 "\x70\x71\x72\x73\x74\x75\x76\x77"
16040 "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
16041 "\x80\x81\x82\x83\x84\x85\x86\x87"
16042 "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
16043 "\x90\x91\x92\x93\x94\x95\x96\x97"
16044 "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
16045 "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
16046 "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
16047 "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
16048 "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
16049 "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
16050 "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
16051 "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
16052 "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
16053 "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
16054 "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
16055 "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
16056 "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
16057 .rlen = 512,
16058 }, {
16059 .key = "\x27\x18\x28\x18\x28\x45\x90\x45"
16060 "\x23\x53\x60\x28\x74\x71\x35\x26"
16061 "\x62\x49\x77\x57\x24\x70\x93\x69"
16062 "\x99\x59\x57\x49\x66\x96\x76\x27",
16063 .klen = 32,
16064 .iv = "\xff\x00\x00\x00\x00\x00\x00\x00"
16065 "\x00\x00\x00\x00\x00\x00\x00\x00",
16066 .input = "\x55\xed\x71\xd3\x02\x8e\x15\x3b"
16067 "\xc6\x71\x29\x2d\x3e\x89\x9f\x59"
16068 "\x68\x6a\xcc\x8a\x56\x97\xf3\x95"
16069 "\x4e\x51\x08\xda\x2a\xf8\x6f\x3c"
16070 "\x78\x16\xea\x80\xdb\x33\x75\x94"
16071 "\xf9\x29\xc4\x2b\x76\x75\x97\xc7"
16072 "\xf2\x98\x2c\xf9\xff\xc8\xd5\x2b"
16073 "\x18\xf1\xaf\xcf\x7c\xc5\x0b\xee"
16074 "\xad\x3c\x76\x7c\xe6\x27\xa2\x2a"
16075 "\xe4\x66\xe1\xab\xa2\x39\xfc\x7c"
16076 "\xf5\xec\x32\x74\xa3\xb8\x03\x88"
16077 "\x52\xfc\x2e\x56\x3f\xa1\xf0\x9f"
16078 "\x84\x5e\x46\xed\x20\x89\xb6\x44"
16079 "\x8d\xd0\xed\x54\x47\x16\xbe\x95"
16080 "\x8a\xb3\x6b\x72\xc4\x32\x52\x13"
16081 "\x1b\xb0\x82\xbe\xac\xf9\x70\xa6"
16082 "\x44\x18\xdd\x8c\x6e\xca\x6e\x45"
16083 "\x8f\x1e\x10\x07\x57\x25\x98\x7b"
16084 "\x17\x8c\x78\xdd\x80\xa7\xd9\xd8"
16085 "\x63\xaf\xb9\x67\x57\xfd\xbc\xdb"
16086 "\x44\xe9\xc5\x65\xd1\xc7\x3b\xff"
16087 "\x20\xa0\x80\x1a\xc3\x9a\xad\x5e"
16088 "\x5d\x3b\xd3\x07\xd9\xf5\xfd\x3d"
16089 "\x4a\x8b\xa8\xd2\x6e\x7a\x51\x65"
16090 "\x6c\x8e\x95\xe0\x45\xc9\x5f\x4a"
16091 "\x09\x3c\x3d\x71\x7f\x0c\x84\x2a"
16092 "\xc8\x48\x52\x1a\xc2\xd5\xd6\x78"
16093 "\x92\x1e\xa0\x90\x2e\xea\xf0\xf3"
16094 "\xdc\x0f\xb1\xaf\x0d\x9b\x06\x2e"
16095 "\x35\x10\x30\x82\x0d\xe7\xc5\x9b"
16096 "\xde\x44\x18\xbd\x9f\xd1\x45\xa9"
16097 "\x7b\x7a\x4a\xad\x35\x65\x27\xca"
16098 "\xb2\xc3\xd4\x9b\x71\x86\x70\xee"
16099 "\xf1\x89\x3b\x85\x4b\x5b\xaa\xaf"
16100 "\xfc\x42\xc8\x31\x59\xbe\x16\x60"
16101 "\x4f\xf9\xfa\x12\xea\xd0\xa7\x14"
16102 "\xf0\x7a\xf3\xd5\x8d\xbd\x81\xef"
16103 "\x52\x7f\x29\x51\x94\x20\x67\x3c"
16104 "\xd1\xaf\x77\x9f\x22\x5a\x4e\x63"
16105 "\xe7\xff\x73\x25\xd1\xdd\x96\x8a"
16106 "\x98\x52\x6d\xf3\xac\x3e\xf2\x18"
16107 "\x6d\xf6\x0a\x29\xa6\x34\x3d\xed"
16108 "\xe3\x27\x0d\x9d\x0a\x02\x44\x7e"
16109 "\x5a\x7e\x67\x0f\x0a\x9e\xd6\xad"
16110 "\x91\xe6\x4d\x81\x8c\x5c\x59\xaa"
16111 "\xfb\xeb\x56\x53\xd2\x7d\x4c\x81"
16112 "\x65\x53\x0f\x41\x11\xbd\x98\x99"
16113 "\xf9\xc6\xfa\x51\x2e\xa3\xdd\x8d"
16114 "\x84\x98\xf9\x34\xed\x33\x2a\x1f"
16115 "\x82\xed\xc1\x73\x98\xd3\x02\xdc"
16116 "\xe6\xc2\x33\x1d\xa2\xb4\xca\x76"
16117 "\x63\x51\x34\x9d\x96\x12\xae\xce"
16118 "\x83\xc9\x76\x5e\xa4\x1b\x53\x37"
16119 "\x17\xd5\xc0\x80\x1d\x62\xf8\x3d"
16120 "\x54\x27\x74\xbb\x10\x86\x57\x46"
16121 "\x68\xe1\xed\x14\xe7\x9d\xfc\x84"
16122 "\x47\xbc\xc2\xf8\x19\x4b\x99\xcf"
16123 "\x7a\xe9\xc4\xb8\x8c\x82\x72\x4d"
16124 "\x7b\x4f\x38\x55\x36\x71\x64\xc1"
16125 "\xfc\x5c\x75\x52\x33\x02\x18\xf8"
16126 "\x17\xe1\x2b\xc2\x43\x39\xbd\x76"
16127 "\x9b\x63\x76\x32\x2f\x19\x72\x10"
16128 "\x9f\x21\x0c\xf1\x66\x50\x7f\xa5"
16129 "\x0d\x1f\x46\xe0\xba\xd3\x2f\x3c",
16130 .ilen = 512,
16131 .result = "\x00\x01\x02\x03\x04\x05\x06\x07"
16132 "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
16133 "\x10\x11\x12\x13\x14\x15\x16\x17"
16134 "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
16135 "\x20\x21\x22\x23\x24\x25\x26\x27"
16136 "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
16137 "\x30\x31\x32\x33\x34\x35\x36\x37"
16138 "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
16139 "\x40\x41\x42\x43\x44\x45\x46\x47"
16140 "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
16141 "\x50\x51\x52\x53\x54\x55\x56\x57"
16142 "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
16143 "\x60\x61\x62\x63\x64\x65\x66\x67"
16144 "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
16145 "\x70\x71\x72\x73\x74\x75\x76\x77"
16146 "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
16147 "\x80\x81\x82\x83\x84\x85\x86\x87"
16148 "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
16149 "\x90\x91\x92\x93\x94\x95\x96\x97"
16150 "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
16151 "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
16152 "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
16153 "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
16154 "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
16155 "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
16156 "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
16157 "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
16158 "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
16159 "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
16160 "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
16161 "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
16162 "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
16163 "\x00\x01\x02\x03\x04\x05\x06\x07"
16164 "\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
16165 "\x10\x11\x12\x13\x14\x15\x16\x17"
16166 "\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
16167 "\x20\x21\x22\x23\x24\x25\x26\x27"
16168 "\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f"
16169 "\x30\x31\x32\x33\x34\x35\x36\x37"
16170 "\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f"
16171 "\x40\x41\x42\x43\x44\x45\x46\x47"
16172 "\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f"
16173 "\x50\x51\x52\x53\x54\x55\x56\x57"
16174 "\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f"
16175 "\x60\x61\x62\x63\x64\x65\x66\x67"
16176 "\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f"
16177 "\x70\x71\x72\x73\x74\x75\x76\x77"
16178 "\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f"
16179 "\x80\x81\x82\x83\x84\x85\x86\x87"
16180 "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f"
16181 "\x90\x91\x92\x93\x94\x95\x96\x97"
16182 "\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f"
16183 "\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7"
16184 "\xa8\xa9\xaa\xab\xac\xad\xae\xaf"
16185 "\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7"
16186 "\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf"
16187 "\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7"
16188 "\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf"
16189 "\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7"
16190 "\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf"
16191 "\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7"
16192 "\xe8\xe9\xea\xeb\xec\xed\xee\xef"
16193 "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7"
16194 "\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff",
16195 .rlen = 512,
16196 .also_non_np = 1,
16197 .np = 3,
16198 .tap = { 512 - 20, 4, 16 },
16199 }
16200};
16201
14326/* Cast6 test vectors from RFC 2612 */ 16202/* Cast6 test vectors from RFC 2612 */
14327static const struct cipher_testvec cast6_enc_tv_template[] = { 16203static const struct cipher_testvec cast6_enc_tv_template[] = {
14328 { 16204 {
diff --git a/crypto/xts.c b/crypto/xts.c
index f317c48b5e43..12284183bd20 100644
--- a/crypto/xts.c
+++ b/crypto/xts.c
@@ -357,78 +357,6 @@ static int decrypt(struct skcipher_request *req)
357 return do_decrypt(req, init_crypt(req, decrypt_done)); 357 return do_decrypt(req, init_crypt(req, decrypt_done));
358} 358}
359 359
360int xts_crypt(struct blkcipher_desc *desc, struct scatterlist *sdst,
361 struct scatterlist *ssrc, unsigned int nbytes,
362 struct xts_crypt_req *req)
363{
364 const unsigned int bsize = XTS_BLOCK_SIZE;
365 const unsigned int max_blks = req->tbuflen / bsize;
366 struct blkcipher_walk walk;
367 unsigned int nblocks;
368 le128 *src, *dst, *t;
369 le128 *t_buf = req->tbuf;
370 int err, i;
371
372 BUG_ON(max_blks < 1);
373
374 blkcipher_walk_init(&walk, sdst, ssrc, nbytes);
375
376 err = blkcipher_walk_virt(desc, &walk);
377 nbytes = walk.nbytes;
378 if (!nbytes)
379 return err;
380
381 nblocks = min(nbytes / bsize, max_blks);
382 src = (le128 *)walk.src.virt.addr;
383 dst = (le128 *)walk.dst.virt.addr;
384
385 /* calculate first value of T */
386 req->tweak_fn(req->tweak_ctx, (u8 *)&t_buf[0], walk.iv);
387
388 i = 0;
389 goto first;
390
391 for (;;) {
392 do {
393 for (i = 0; i < nblocks; i++) {
394 gf128mul_x_ble(&t_buf[i], t);
395first:
396 t = &t_buf[i];
397
398 /* PP <- T xor P */
399 le128_xor(dst + i, t, src + i);
400 }
401
402 /* CC <- E(Key2,PP) */
403 req->crypt_fn(req->crypt_ctx, (u8 *)dst,
404 nblocks * bsize);
405
406 /* C <- T xor CC */
407 for (i = 0; i < nblocks; i++)
408 le128_xor(dst + i, dst + i, &t_buf[i]);
409
410 src += nblocks;
411 dst += nblocks;
412 nbytes -= nblocks * bsize;
413 nblocks = min(nbytes / bsize, max_blks);
414 } while (nblocks > 0);
415
416 *(le128 *)walk.iv = *t;
417
418 err = blkcipher_walk_done(desc, &walk, nbytes);
419 nbytes = walk.nbytes;
420 if (!nbytes)
421 break;
422
423 nblocks = min(nbytes / bsize, max_blks);
424 src = (le128 *)walk.src.virt.addr;
425 dst = (le128 *)walk.dst.virt.addr;
426 }
427
428 return err;
429}
430EXPORT_SYMBOL_GPL(xts_crypt);
431
432static int init_tfm(struct crypto_skcipher *tfm) 360static int init_tfm(struct crypto_skcipher *tfm)
433{ 361{
434 struct skcipher_instance *inst = skcipher_alg_instance(tfm); 362 struct skcipher_instance *inst = skcipher_alg_instance(tfm);