author		Linus Torvalds <torvalds@linux-foundation.org>	2015-04-15 13:42:15 -0400
committer	Linus Torvalds <torvalds@linux-foundation.org>	2015-04-15 13:42:15 -0400
commit		cb906953d2c3fd450655d9fa833f03690ad50c23 (patch)
tree		06c5665afb24baee3ac49f62db61ca97918079b4 /arch/x86/crypto
parent		6c373ca89399c5a3f7ef210ad8f63dc3437da345 (diff)
parent		3abafaf2192b1712079edfd4232b19877d6f41a5 (diff)
Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto update from Herbert Xu:
 "Here is the crypto update for 4.1:

  New interfaces:
   - user-space interface for AEAD
   - user-space interface for RNG (i.e., pseudo RNG)

  New hashes:
   - ARMv8 SHA1/256
   - ARMv8 AES
   - ARMv8 GHASH
   - ARM assembler and NEON SHA256
   - MIPS OCTEON SHA1/256/512
   - MIPS img-hash SHA1/256 and MD5
   - Power 8 VMX AES/CBC/CTR/GHASH
   - PPC assembler AES, SHA1/256 and MD5
   - Broadcom IPROC RNG driver

  Cleanups/fixes:
   - prevent internal helper algos from being exposed to user-space
   - merge common code from assembly/C SHA implementations
   - misc fixes"

* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (169 commits)
  crypto: arm - workaround for building with old binutils
  crypto: arm/sha256 - avoid sha256 code on ARMv7-M
  crypto: x86/sha512_ssse3 - move SHA-384/512 SSSE3 implementation to base layer
  crypto: x86/sha256_ssse3 - move SHA-224/256 SSSE3 implementation to base layer
  crypto: x86/sha1_ssse3 - move SHA-1 SSSE3 implementation to base layer
  crypto: arm64/sha2-ce - move SHA-224/256 ARMv8 implementation to base layer
  crypto: arm64/sha1-ce - move SHA-1 ARMv8 implementation to base layer
  crypto: arm/sha2-ce - move SHA-224/256 ARMv8 implementation to base layer
  crypto: arm/sha256 - move SHA-224/256 ASM/NEON implementation to base layer
  crypto: arm/sha1-ce - move SHA-1 ARMv8 implementation to base layer
  crypto: arm/sha1_neon - move SHA-1 NEON implementation to base layer
  crypto: arm/sha1 - move SHA-1 ARM asm implementation to base layer
  crypto: sha512-generic - move to generic glue implementation
  crypto: sha256-generic - move to generic glue implementation
  crypto: sha1-generic - move to generic glue implementation
  crypto: sha512 - implement base layer for SHA-512
  crypto: sha256 - implement base layer for SHA-256
  crypto: sha1 - implement base layer for SHA-1
  crypto: api - remove instance when test failed
  crypto: api - Move alg ref count init to crypto_check_alg
  ...
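The one change that recurs across nearly every hunk below is the new CRYPTO_ALG_INTERNAL flag. The "__"-prefixed SIMD helper ciphers and hashes assume kernel FPU context and were never meant to be instantiated on their own, yet user-space could previously reach them through interfaces such as AF_ALG. Marking them internal hides them from ordinary algorithm lookups, and the async wrappers that legitimately consume them now pass CRYPTO_ALG_INTERNAL as both the type and the mask when allocating. A minimal sketch of the pattern, using a hypothetical "__example-helper" rather than code from this patch:

	#include <linux/crypto.h>
	#include <crypto/cryptd.h>

	/*
	 * Sketch only: a helper flagged CRYPTO_ALG_INTERNAL is masked out of
	 * generic crypto_alloc_*() lookups, so user-space can no longer
	 * instantiate it directly.
	 */
	static struct crypto_alg example_helper_alg = {
		.cra_name		= "__example-helper",		/* hypothetical */
		.cra_driver_name	= "__driver-example-helper",	/* hypothetical */
		.cra_priority		= 0,
		.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
					  CRYPTO_ALG_INTERNAL,
		/* ...blocksize, ctxsize and ops as in the real glue code... */
	};

	static int example_async_init(void)
	{
		struct cryptd_ablkcipher *cryptd_tfm;

		/*
		 * A wrapper that may use the helper must name it explicitly;
		 * the old (alg_name, 0, 0) call would no longer find it.
		 */
		cryptd_tfm = cryptd_alloc_ablkcipher("__driver-example-helper",
						     CRYPTO_ALG_INTERNAL,
						     CRYPTO_ALG_INTERNAL);
		if (IS_ERR(cryptd_tfm))
			return PTR_ERR(cryptd_tfm);
		/* stash cryptd_tfm in the wrapper context, as the glue code does */
		return 0;
	}

This is exactly the shape of the cryptd_alloc_aead()/cryptd_alloc_ahash()/mcryptd_alloc_ahash() call-site changes in the diff that follows.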
Diffstat (limited to 'arch/x86/crypto')
-rw-r--r--	arch/x86/crypto/aesni-intel_glue.c		187
-rw-r--r--	arch/x86/crypto/camellia_aesni_avx2_glue.c	 15
-rw-r--r--	arch/x86/crypto/camellia_aesni_avx_glue.c	 15
-rw-r--r--	arch/x86/crypto/cast5_avx_glue.c		  9
-rw-r--r--	arch/x86/crypto/cast6_avx_glue.c		 15
-rw-r--r--	arch/x86/crypto/ghash-clmulni-intel_glue.c	  7
-rw-r--r--	arch/x86/crypto/glue_helper.c			  1
-rw-r--r--	arch/x86/crypto/serpent_avx2_glue.c		 15
-rw-r--r--	arch/x86/crypto/serpent_avx_glue.c		 15
-rw-r--r--	arch/x86/crypto/serpent_sse2_glue.c		 15
-rw-r--r--	arch/x86/crypto/sha-mb/sha1_mb.c		  9
-rw-r--r--	arch/x86/crypto/sha-mb/sha1_mb_mgr_init_avx2.c	  2
-rw-r--r--	arch/x86/crypto/sha1_ssse3_glue.c		139
-rw-r--r--	arch/x86/crypto/sha256-avx-asm.S		 10
-rw-r--r--	arch/x86/crypto/sha256-avx2-asm.S		 10
-rw-r--r--	arch/x86/crypto/sha256-ssse3-asm.S		 10
-rw-r--r--	arch/x86/crypto/sha256_ssse3_glue.c		193
-rw-r--r--	arch/x86/crypto/sha512-avx-asm.S		  6
-rw-r--r--	arch/x86/crypto/sha512-avx2-asm.S		  6
-rw-r--r--	arch/x86/crypto/sha512-ssse3-asm.S		  6
-rw-r--r--	arch/x86/crypto/sha512_ssse3_glue.c		202
-rw-r--r--	arch/x86/crypto/twofish_avx_glue.c		 15
22 files changed, 335 insertions, 567 deletions
diff --git a/arch/x86/crypto/aesni-intel_glue.c b/arch/x86/crypto/aesni-intel_glue.c
index 54f60ab41c63..112cefacf2af 100644
--- a/arch/x86/crypto/aesni-intel_glue.c
+++ b/arch/x86/crypto/aesni-intel_glue.c
@@ -797,7 +797,9 @@ static int rfc4106_init(struct crypto_tfm *tfm)
 		PTR_ALIGN((u8 *)crypto_tfm_ctx(tfm), AESNI_ALIGN);
 	struct crypto_aead *cryptd_child;
 	struct aesni_rfc4106_gcm_ctx *child_ctx;
-	cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni", 0, 0);
+	cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni",
+				       CRYPTO_ALG_INTERNAL,
+				       CRYPTO_ALG_INTERNAL);
 	if (IS_ERR(cryptd_tfm))
 		return PTR_ERR(cryptd_tfm);
 
@@ -890,15 +892,12 @@ out_free_ablkcipher:
 	return ret;
 }
 
-static int rfc4106_set_key(struct crypto_aead *parent, const u8 *key,
-			   unsigned int key_len)
+static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
+				  unsigned int key_len)
 {
 	int ret = 0;
-	struct crypto_tfm *tfm = crypto_aead_tfm(parent);
-	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(parent);
-	struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
-	struct aesni_rfc4106_gcm_ctx *child_ctx =
-				aesni_rfc4106_gcm_ctx_get(cryptd_child);
+	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
+	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);
 	u8 *new_key_align, *new_key_mem = NULL;
 
 	if (key_len < 4) {
@@ -943,20 +942,31 @@ static int rfc4106_set_key(struct crypto_aead *parent, const u8 *key,
 		goto exit;
 	}
 	ret = rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
-	memcpy(child_ctx, ctx, sizeof(*ctx));
 exit:
 	kfree(new_key_mem);
 	return ret;
 }
 
-/* This is the Integrity Check Value (aka the authentication tag length and can
- * be 8, 12 or 16 bytes long. */
-static int rfc4106_set_authsize(struct crypto_aead *parent,
-				unsigned int authsize)
+static int rfc4106_set_key(struct crypto_aead *parent, const u8 *key,
+			   unsigned int key_len)
 {
 	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(parent);
-	struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
+	struct crypto_aead *child = cryptd_aead_child(ctx->cryptd_tfm);
+	struct aesni_rfc4106_gcm_ctx *c_ctx = aesni_rfc4106_gcm_ctx_get(child);
+	struct cryptd_aead *cryptd_tfm = ctx->cryptd_tfm;
+	int ret;
 
+	ret = crypto_aead_setkey(child, key, key_len);
+	if (!ret) {
+		memcpy(ctx, c_ctx, sizeof(*ctx));
+		ctx->cryptd_tfm = cryptd_tfm;
+	}
+	return ret;
+}
+
+static int common_rfc4106_set_authsize(struct crypto_aead *aead,
+				       unsigned int authsize)
+{
 	switch (authsize) {
 	case 8:
 	case 12:
@@ -965,51 +975,23 @@ static int rfc4106_set_authsize(struct crypto_aead *parent,
 	default:
 		return -EINVAL;
 	}
-	crypto_aead_crt(parent)->authsize = authsize;
-	crypto_aead_crt(cryptd_child)->authsize = authsize;
+	crypto_aead_crt(aead)->authsize = authsize;
 	return 0;
 }
 
-static int rfc4106_encrypt(struct aead_request *req)
-{
-	int ret;
-	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
-	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
-
-	if (!irq_fpu_usable()) {
-		struct aead_request *cryptd_req =
-			(struct aead_request *) aead_request_ctx(req);
-		memcpy(cryptd_req, req, sizeof(*req));
-		aead_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
-		return crypto_aead_encrypt(cryptd_req);
-	} else {
-		struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
-		kernel_fpu_begin();
-		ret = cryptd_child->base.crt_aead.encrypt(req);
-		kernel_fpu_end();
-		return ret;
-	}
-}
-
-static int rfc4106_decrypt(struct aead_request *req)
+/* This is the Integrity Check Value (aka the authentication tag length and can
+ * be 8, 12 or 16 bytes long. */
+static int rfc4106_set_authsize(struct crypto_aead *parent,
+				unsigned int authsize)
 {
+	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(parent);
+	struct crypto_aead *child = cryptd_aead_child(ctx->cryptd_tfm);
 	int ret;
-	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
-	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
 
-	if (!irq_fpu_usable()) {
-		struct aead_request *cryptd_req =
-			(struct aead_request *) aead_request_ctx(req);
-		memcpy(cryptd_req, req, sizeof(*req));
-		aead_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
-		return crypto_aead_decrypt(cryptd_req);
-	} else {
-		struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
-		kernel_fpu_begin();
-		ret = cryptd_child->base.crt_aead.decrypt(req);
-		kernel_fpu_end();
-		return ret;
-	}
+	ret = crypto_aead_setauthsize(child, authsize);
+	if (!ret)
+		crypto_aead_crt(parent)->authsize = authsize;
+	return ret;
 }
 
 static int __driver_rfc4106_encrypt(struct aead_request *req)
@@ -1185,6 +1167,78 @@ static int __driver_rfc4106_decrypt(struct aead_request *req)
 	}
 	return retval;
 }
+
+static int rfc4106_encrypt(struct aead_request *req)
+{
+	int ret;
+	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
+
+	if (!irq_fpu_usable()) {
+		struct aead_request *cryptd_req =
+			(struct aead_request *) aead_request_ctx(req);
+
+		memcpy(cryptd_req, req, sizeof(*req));
+		aead_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
+		ret = crypto_aead_encrypt(cryptd_req);
+	} else {
+		kernel_fpu_begin();
+		ret = __driver_rfc4106_encrypt(req);
+		kernel_fpu_end();
+	}
+	return ret;
+}
+
+static int rfc4106_decrypt(struct aead_request *req)
+{
+	int ret;
+	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
+
+	if (!irq_fpu_usable()) {
+		struct aead_request *cryptd_req =
+			(struct aead_request *) aead_request_ctx(req);
+
+		memcpy(cryptd_req, req, sizeof(*req));
+		aead_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
+		ret = crypto_aead_decrypt(cryptd_req);
+	} else {
+		kernel_fpu_begin();
+		ret = __driver_rfc4106_decrypt(req);
+		kernel_fpu_end();
+	}
+	return ret;
+}
+
+static int helper_rfc4106_encrypt(struct aead_request *req)
+{
+	int ret;
+
+	if (unlikely(!irq_fpu_usable())) {
+		WARN_ONCE(1, "__gcm-aes-aesni alg used in invalid context");
+		ret = -EINVAL;
+	} else {
+		kernel_fpu_begin();
+		ret = __driver_rfc4106_encrypt(req);
+		kernel_fpu_end();
+	}
+	return ret;
+}
+
+static int helper_rfc4106_decrypt(struct aead_request *req)
+{
+	int ret;
+
+	if (unlikely(!irq_fpu_usable())) {
+		WARN_ONCE(1, "__gcm-aes-aesni alg used in invalid context");
+		ret = -EINVAL;
+	} else {
+		kernel_fpu_begin();
+		ret = __driver_rfc4106_decrypt(req);
+		kernel_fpu_end();
+	}
+	return ret;
+}
 #endif
 
 static struct crypto_alg aesni_algs[] = { {
@@ -1210,7 +1264,7 @@ static struct crypto_alg aesni_algs[] = { {
 	.cra_name		= "__aes-aesni",
 	.cra_driver_name	= "__driver-aes-aesni",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= AES_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
 				  AESNI_ALIGN - 1,
@@ -1229,7 +1283,8 @@ static struct crypto_alg aesni_algs[] = { {
 	.cra_name		= "__ecb-aes-aesni",
 	.cra_driver_name	= "__driver-ecb-aes-aesni",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= AES_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
 				  AESNI_ALIGN - 1,
@@ -1249,7 +1304,8 @@ static struct crypto_alg aesni_algs[] = { {
 	.cra_name		= "__cbc-aes-aesni",
 	.cra_driver_name	= "__driver-cbc-aes-aesni",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= AES_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
 				  AESNI_ALIGN - 1,
@@ -1313,7 +1369,8 @@ static struct crypto_alg aesni_algs[] = { {
 	.cra_name		= "__ctr-aes-aesni",
 	.cra_driver_name	= "__driver-ctr-aes-aesni",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= 1,
 	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
 				  AESNI_ALIGN - 1,
@@ -1357,7 +1414,7 @@ static struct crypto_alg aesni_algs[] = { {
 	.cra_name		= "__gcm-aes-aesni",
 	.cra_driver_name	= "__driver-gcm-aes-aesni",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_AEAD,
+	.cra_flags		= CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= 1,
 	.cra_ctxsize		= sizeof(struct aesni_rfc4106_gcm_ctx) +
 				  AESNI_ALIGN,
@@ -1366,8 +1423,12 @@ static struct crypto_alg aesni_algs[] = { {
 	.cra_module		= THIS_MODULE,
 	.cra_u = {
 		.aead = {
-			.encrypt	= __driver_rfc4106_encrypt,
-			.decrypt	= __driver_rfc4106_decrypt,
+			.setkey		= common_rfc4106_set_key,
+			.setauthsize	= common_rfc4106_set_authsize,
+			.encrypt	= helper_rfc4106_encrypt,
+			.decrypt	= helper_rfc4106_decrypt,
+			.ivsize		= 8,
+			.maxauthsize	= 16,
 		},
 	},
 }, {
@@ -1423,7 +1484,8 @@ static struct crypto_alg aesni_algs[] = { {
 	.cra_name		= "__lrw-aes-aesni",
 	.cra_driver_name	= "__driver-lrw-aes-aesni",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= AES_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct aesni_lrw_ctx),
 	.cra_alignmask		= 0,
@@ -1444,7 +1506,8 @@ static struct crypto_alg aesni_algs[] = { {
 	.cra_name		= "__xts-aes-aesni",
 	.cra_driver_name	= "__driver-xts-aes-aesni",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= AES_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct aesni_xts_ctx),
 	.cra_alignmask		= 0,
diff --git a/arch/x86/crypto/camellia_aesni_avx2_glue.c b/arch/x86/crypto/camellia_aesni_avx2_glue.c
index 9a07fafe3831..baf0ac21ace5 100644
--- a/arch/x86/crypto/camellia_aesni_avx2_glue.c
+++ b/arch/x86/crypto/camellia_aesni_avx2_glue.c
@@ -343,7 +343,8 @@ static struct crypto_alg cmll_algs[10] = { {
 	.cra_name		= "__ecb-camellia-aesni-avx2",
 	.cra_driver_name	= "__driver-ecb-camellia-aesni-avx2",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct camellia_ctx),
 	.cra_alignmask		= 0,
@@ -362,7 +363,8 @@ static struct crypto_alg cmll_algs[10] = { {
 	.cra_name		= "__cbc-camellia-aesni-avx2",
 	.cra_driver_name	= "__driver-cbc-camellia-aesni-avx2",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct camellia_ctx),
 	.cra_alignmask		= 0,
@@ -381,7 +383,8 @@ static struct crypto_alg cmll_algs[10] = { {
 	.cra_name		= "__ctr-camellia-aesni-avx2",
 	.cra_driver_name	= "__driver-ctr-camellia-aesni-avx2",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= 1,
 	.cra_ctxsize		= sizeof(struct camellia_ctx),
 	.cra_alignmask		= 0,
@@ -401,7 +404,8 @@ static struct crypto_alg cmll_algs[10] = { {
 	.cra_name		= "__lrw-camellia-aesni-avx2",
 	.cra_driver_name	= "__driver-lrw-camellia-aesni-avx2",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct camellia_lrw_ctx),
 	.cra_alignmask		= 0,
@@ -424,7 +428,8 @@ static struct crypto_alg cmll_algs[10] = { {
 	.cra_name		= "__xts-camellia-aesni-avx2",
 	.cra_driver_name	= "__driver-xts-camellia-aesni-avx2",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct camellia_xts_ctx),
 	.cra_alignmask		= 0,
diff --git a/arch/x86/crypto/camellia_aesni_avx_glue.c b/arch/x86/crypto/camellia_aesni_avx_glue.c
index ed38d959add6..78818a1e73e3 100644
--- a/arch/x86/crypto/camellia_aesni_avx_glue.c
+++ b/arch/x86/crypto/camellia_aesni_avx_glue.c
@@ -335,7 +335,8 @@ static struct crypto_alg cmll_algs[10] = { {
 	.cra_name		= "__ecb-camellia-aesni",
 	.cra_driver_name	= "__driver-ecb-camellia-aesni",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct camellia_ctx),
 	.cra_alignmask		= 0,
@@ -354,7 +355,8 @@ static struct crypto_alg cmll_algs[10] = { {
 	.cra_name		= "__cbc-camellia-aesni",
 	.cra_driver_name	= "__driver-cbc-camellia-aesni",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct camellia_ctx),
 	.cra_alignmask		= 0,
@@ -373,7 +375,8 @@ static struct crypto_alg cmll_algs[10] = { {
 	.cra_name		= "__ctr-camellia-aesni",
 	.cra_driver_name	= "__driver-ctr-camellia-aesni",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= 1,
 	.cra_ctxsize		= sizeof(struct camellia_ctx),
 	.cra_alignmask		= 0,
@@ -393,7 +396,8 @@ static struct crypto_alg cmll_algs[10] = { {
 	.cra_name		= "__lrw-camellia-aesni",
 	.cra_driver_name	= "__driver-lrw-camellia-aesni",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct camellia_lrw_ctx),
 	.cra_alignmask		= 0,
@@ -416,7 +420,8 @@ static struct crypto_alg cmll_algs[10] = { {
 	.cra_name		= "__xts-camellia-aesni",
 	.cra_driver_name	= "__driver-xts-camellia-aesni",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct camellia_xts_ctx),
 	.cra_alignmask		= 0,
diff --git a/arch/x86/crypto/cast5_avx_glue.c b/arch/x86/crypto/cast5_avx_glue.c
index 60ada677a928..236c80974457 100644
--- a/arch/x86/crypto/cast5_avx_glue.c
+++ b/arch/x86/crypto/cast5_avx_glue.c
@@ -341,7 +341,8 @@ static struct crypto_alg cast5_algs[6] = { {
 	.cra_name		= "__ecb-cast5-avx",
 	.cra_driver_name	= "__driver-ecb-cast5-avx",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= CAST5_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct cast5_ctx),
 	.cra_alignmask		= 0,
@@ -360,7 +361,8 @@ static struct crypto_alg cast5_algs[6] = { {
 	.cra_name		= "__cbc-cast5-avx",
 	.cra_driver_name	= "__driver-cbc-cast5-avx",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= CAST5_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct cast5_ctx),
 	.cra_alignmask		= 0,
@@ -379,7 +381,8 @@ static struct crypto_alg cast5_algs[6] = { {
 	.cra_name		= "__ctr-cast5-avx",
 	.cra_driver_name	= "__driver-ctr-cast5-avx",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= 1,
 	.cra_ctxsize		= sizeof(struct cast5_ctx),
 	.cra_alignmask		= 0,
diff --git a/arch/x86/crypto/cast6_avx_glue.c b/arch/x86/crypto/cast6_avx_glue.c
index 0160f68a57ff..f448810ca4ac 100644
--- a/arch/x86/crypto/cast6_avx_glue.c
+++ b/arch/x86/crypto/cast6_avx_glue.c
@@ -372,7 +372,8 @@ static struct crypto_alg cast6_algs[10] = { {
 	.cra_name		= "__ecb-cast6-avx",
 	.cra_driver_name	= "__driver-ecb-cast6-avx",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= CAST6_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct cast6_ctx),
 	.cra_alignmask		= 0,
@@ -391,7 +392,8 @@ static struct crypto_alg cast6_algs[10] = { {
 	.cra_name		= "__cbc-cast6-avx",
 	.cra_driver_name	= "__driver-cbc-cast6-avx",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= CAST6_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct cast6_ctx),
 	.cra_alignmask		= 0,
@@ -410,7 +412,8 @@ static struct crypto_alg cast6_algs[10] = { {
 	.cra_name		= "__ctr-cast6-avx",
 	.cra_driver_name	= "__driver-ctr-cast6-avx",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= 1,
 	.cra_ctxsize		= sizeof(struct cast6_ctx),
 	.cra_alignmask		= 0,
@@ -430,7 +433,8 @@ static struct crypto_alg cast6_algs[10] = { {
 	.cra_name		= "__lrw-cast6-avx",
 	.cra_driver_name	= "__driver-lrw-cast6-avx",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= CAST6_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct cast6_lrw_ctx),
 	.cra_alignmask		= 0,
@@ -453,7 +457,8 @@ static struct crypto_alg cast6_algs[10] = { {
 	.cra_name		= "__xts-cast6-avx",
 	.cra_driver_name	= "__driver-xts-cast6-avx",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= CAST6_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct cast6_xts_ctx),
 	.cra_alignmask		= 0,
diff --git a/arch/x86/crypto/ghash-clmulni-intel_glue.c b/arch/x86/crypto/ghash-clmulni-intel_glue.c
index 8253d85aa165..2079baf06bdd 100644
--- a/arch/x86/crypto/ghash-clmulni-intel_glue.c
+++ b/arch/x86/crypto/ghash-clmulni-intel_glue.c
@@ -154,7 +154,8 @@ static struct shash_alg ghash_alg = {
 		.cra_name		= "__ghash",
 		.cra_driver_name	= "__ghash-pclmulqdqni",
 		.cra_priority		= 0,
-		.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
+		.cra_flags		= CRYPTO_ALG_TYPE_SHASH |
+					  CRYPTO_ALG_INTERNAL,
 		.cra_blocksize		= GHASH_BLOCK_SIZE,
 		.cra_ctxsize		= sizeof(struct ghash_ctx),
 		.cra_module		= THIS_MODULE,
@@ -261,7 +262,9 @@ static int ghash_async_init_tfm(struct crypto_tfm *tfm)
 	struct cryptd_ahash *cryptd_tfm;
 	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	cryptd_tfm = cryptd_alloc_ahash("__ghash-pclmulqdqni", 0, 0);
+	cryptd_tfm = cryptd_alloc_ahash("__ghash-pclmulqdqni",
+					CRYPTO_ALG_INTERNAL,
+					CRYPTO_ALG_INTERNAL);
 	if (IS_ERR(cryptd_tfm))
 		return PTR_ERR(cryptd_tfm);
 	ctx->cryptd_tfm = cryptd_tfm;
diff --git a/arch/x86/crypto/glue_helper.c b/arch/x86/crypto/glue_helper.c
index 432f1d76ceb8..6a85598931b5 100644
--- a/arch/x86/crypto/glue_helper.c
+++ b/arch/x86/crypto/glue_helper.c
@@ -232,7 +232,6 @@ static void glue_ctr_crypt_final_128bit(const common_glue_ctr_func_t fn_ctr,
 
 	le128_to_be128((be128 *)walk->iv, &ctrblk);
 }
-EXPORT_SYMBOL_GPL(glue_ctr_crypt_final_128bit);
 
 static unsigned int __glue_ctr_crypt_128bit(const struct common_glue_ctx *gctx,
 					    struct blkcipher_desc *desc,
diff --git a/arch/x86/crypto/serpent_avx2_glue.c b/arch/x86/crypto/serpent_avx2_glue.c
index 437e47a4d302..2f63dc89e7a9 100644
--- a/arch/x86/crypto/serpent_avx2_glue.c
+++ b/arch/x86/crypto/serpent_avx2_glue.c
@@ -309,7 +309,8 @@ static struct crypto_alg srp_algs[10] = { {
 	.cra_name		= "__ecb-serpent-avx2",
 	.cra_driver_name	= "__driver-ecb-serpent-avx2",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= SERPENT_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct serpent_ctx),
 	.cra_alignmask		= 0,
@@ -329,7 +330,8 @@ static struct crypto_alg srp_algs[10] = { {
 	.cra_name		= "__cbc-serpent-avx2",
 	.cra_driver_name	= "__driver-cbc-serpent-avx2",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= SERPENT_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct serpent_ctx),
 	.cra_alignmask		= 0,
@@ -349,7 +351,8 @@ static struct crypto_alg srp_algs[10] = { {
 	.cra_name		= "__ctr-serpent-avx2",
 	.cra_driver_name	= "__driver-ctr-serpent-avx2",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= 1,
 	.cra_ctxsize		= sizeof(struct serpent_ctx),
 	.cra_alignmask		= 0,
@@ -370,7 +373,8 @@ static struct crypto_alg srp_algs[10] = { {
 	.cra_name		= "__lrw-serpent-avx2",
 	.cra_driver_name	= "__driver-lrw-serpent-avx2",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= SERPENT_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct serpent_lrw_ctx),
 	.cra_alignmask		= 0,
@@ -394,7 +398,8 @@ static struct crypto_alg srp_algs[10] = { {
 	.cra_name		= "__xts-serpent-avx2",
 	.cra_driver_name	= "__driver-xts-serpent-avx2",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= SERPENT_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct serpent_xts_ctx),
 	.cra_alignmask		= 0,
diff --git a/arch/x86/crypto/serpent_avx_glue.c b/arch/x86/crypto/serpent_avx_glue.c
index 7e217398b4eb..c8d478af8456 100644
--- a/arch/x86/crypto/serpent_avx_glue.c
+++ b/arch/x86/crypto/serpent_avx_glue.c
@@ -378,7 +378,8 @@ static struct crypto_alg serpent_algs[10] = { {
 	.cra_name		= "__ecb-serpent-avx",
 	.cra_driver_name	= "__driver-ecb-serpent-avx",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= SERPENT_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct serpent_ctx),
 	.cra_alignmask		= 0,
@@ -397,7 +398,8 @@ static struct crypto_alg serpent_algs[10] = { {
 	.cra_name		= "__cbc-serpent-avx",
 	.cra_driver_name	= "__driver-cbc-serpent-avx",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= SERPENT_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct serpent_ctx),
 	.cra_alignmask		= 0,
@@ -416,7 +418,8 @@ static struct crypto_alg serpent_algs[10] = { {
 	.cra_name		= "__ctr-serpent-avx",
 	.cra_driver_name	= "__driver-ctr-serpent-avx",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= 1,
 	.cra_ctxsize		= sizeof(struct serpent_ctx),
 	.cra_alignmask		= 0,
@@ -436,7 +439,8 @@ static struct crypto_alg serpent_algs[10] = { {
 	.cra_name		= "__lrw-serpent-avx",
 	.cra_driver_name	= "__driver-lrw-serpent-avx",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= SERPENT_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct serpent_lrw_ctx),
 	.cra_alignmask		= 0,
@@ -459,7 +463,8 @@ static struct crypto_alg serpent_algs[10] = { {
 	.cra_name		= "__xts-serpent-avx",
 	.cra_driver_name	= "__driver-xts-serpent-avx",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= SERPENT_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct serpent_xts_ctx),
 	.cra_alignmask		= 0,
diff --git a/arch/x86/crypto/serpent_sse2_glue.c b/arch/x86/crypto/serpent_sse2_glue.c
index bf025adaea01..3643dd508f45 100644
--- a/arch/x86/crypto/serpent_sse2_glue.c
+++ b/arch/x86/crypto/serpent_sse2_glue.c
@@ -387,7 +387,8 @@ static struct crypto_alg serpent_algs[10] = { {
 	.cra_name		= "__ecb-serpent-sse2",
 	.cra_driver_name	= "__driver-ecb-serpent-sse2",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= SERPENT_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct serpent_ctx),
 	.cra_alignmask		= 0,
@@ -406,7 +407,8 @@ static struct crypto_alg serpent_algs[10] = { {
 	.cra_name		= "__cbc-serpent-sse2",
 	.cra_driver_name	= "__driver-cbc-serpent-sse2",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= SERPENT_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct serpent_ctx),
 	.cra_alignmask		= 0,
@@ -425,7 +427,8 @@ static struct crypto_alg serpent_algs[10] = { {
 	.cra_name		= "__ctr-serpent-sse2",
 	.cra_driver_name	= "__driver-ctr-serpent-sse2",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= 1,
 	.cra_ctxsize		= sizeof(struct serpent_ctx),
 	.cra_alignmask		= 0,
@@ -445,7 +448,8 @@ static struct crypto_alg serpent_algs[10] = { {
 	.cra_name		= "__lrw-serpent-sse2",
 	.cra_driver_name	= "__driver-lrw-serpent-sse2",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= SERPENT_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct serpent_lrw_ctx),
 	.cra_alignmask		= 0,
@@ -468,7 +472,8 @@ static struct crypto_alg serpent_algs[10] = { {
 	.cra_name		= "__xts-serpent-sse2",
 	.cra_driver_name	= "__driver-xts-serpent-sse2",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= SERPENT_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct serpent_xts_ctx),
 	.cra_alignmask		= 0,
diff --git a/arch/x86/crypto/sha-mb/sha1_mb.c b/arch/x86/crypto/sha-mb/sha1_mb.c
index fd9f6b035b16..e510b1c5d690 100644
--- a/arch/x86/crypto/sha-mb/sha1_mb.c
+++ b/arch/x86/crypto/sha-mb/sha1_mb.c
@@ -694,7 +694,8 @@ static struct shash_alg sha1_mb_shash_alg = {
 	 * use ASYNC flag as some buffers in multi-buffer
 	 * algo may not have completed before hashing thread sleep
 	 */
-	.cra_flags	 = CRYPTO_ALG_TYPE_SHASH | CRYPTO_ALG_ASYNC,
+	.cra_flags	 = CRYPTO_ALG_TYPE_SHASH | CRYPTO_ALG_ASYNC |
+			   CRYPTO_ALG_INTERNAL,
 	.cra_blocksize	 = SHA1_BLOCK_SIZE,
 	.cra_module	 = THIS_MODULE,
 	.cra_list	 = LIST_HEAD_INIT(sha1_mb_shash_alg.base.cra_list),
@@ -770,7 +771,9 @@ static int sha1_mb_async_init_tfm(struct crypto_tfm *tfm)
 	struct sha1_mb_ctx *ctx = crypto_tfm_ctx(tfm);
 	struct mcryptd_hash_ctx *mctx;
 
-	mcryptd_tfm = mcryptd_alloc_ahash("__intel_sha1-mb", 0, 0);
+	mcryptd_tfm = mcryptd_alloc_ahash("__intel_sha1-mb",
+					  CRYPTO_ALG_INTERNAL,
+					  CRYPTO_ALG_INTERNAL);
 	if (IS_ERR(mcryptd_tfm))
 		return PTR_ERR(mcryptd_tfm);
 	mctx = crypto_ahash_ctx(&mcryptd_tfm->base);
@@ -828,7 +831,7 @@ static unsigned long sha1_mb_flusher(struct mcryptd_alg_cstate *cstate)
 	while (!list_empty(&cstate->work_list)) {
 		rctx = list_entry(cstate->work_list.next,
 				struct mcryptd_hash_request_ctx, waiter);
-		if time_before(cur_time, rctx->tag.expire)
+		if (time_before(cur_time, rctx->tag.expire))
 			break;
 		kernel_fpu_begin();
 		sha_ctx = (struct sha1_hash_ctx *) sha1_ctx_mgr_flush(cstate->mgr);
diff --git a/arch/x86/crypto/sha-mb/sha1_mb_mgr_init_avx2.c b/arch/x86/crypto/sha-mb/sha1_mb_mgr_init_avx2.c
index 4ca7e166a2aa..822acb5b464c 100644
--- a/arch/x86/crypto/sha-mb/sha1_mb_mgr_init_avx2.c
+++ b/arch/x86/crypto/sha-mb/sha1_mb_mgr_init_avx2.c
@@ -56,7 +56,7 @@
 void sha1_mb_mgr_init_avx2(struct sha1_mb_mgr *state)
 {
 	unsigned int j;
-	state->unused_lanes = 0xF76543210;
+	state->unused_lanes = 0xF76543210ULL;
 	for (j = 0; j < 8; j++) {
 		state->lens[j] = 0xFFFFFFFF;
 		state->ldata[j].job_in_lane = NULL;
diff --git a/arch/x86/crypto/sha1_ssse3_glue.c b/arch/x86/crypto/sha1_ssse3_glue.c
index 6c20fe04a738..33d1b9dc14cc 100644
--- a/arch/x86/crypto/sha1_ssse3_glue.c
+++ b/arch/x86/crypto/sha1_ssse3_glue.c
@@ -28,7 +28,7 @@
 #include <linux/cryptohash.h>
 #include <linux/types.h>
 #include <crypto/sha.h>
-#include <asm/byteorder.h>
+#include <crypto/sha1_base.h>
 #include <asm/i387.h>
 #include <asm/xcr.h>
 #include <asm/xsave.h>
@@ -44,132 +44,51 @@ asmlinkage void sha1_transform_avx(u32 *digest, const char *data,
 #define SHA1_AVX2_BLOCK_OPTSIZE	4	/* optimal 4*64 bytes of SHA1 blocks */
 
 asmlinkage void sha1_transform_avx2(u32 *digest, const char *data,
 				    unsigned int rounds);
 #endif
 
-static asmlinkage void (*sha1_transform_asm)(u32 *, const char *, unsigned int);
-
-
-static int sha1_ssse3_init(struct shash_desc *desc)
-{
-	struct sha1_state *sctx = shash_desc_ctx(desc);
-
-	*sctx = (struct sha1_state){
-		.state = { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 },
-	};
-
-	return 0;
-}
-
-static int __sha1_ssse3_update(struct shash_desc *desc, const u8 *data,
-			       unsigned int len, unsigned int partial)
-{
-	struct sha1_state *sctx = shash_desc_ctx(desc);
-	unsigned int done = 0;
-
-	sctx->count += len;
-
-	if (partial) {
-		done = SHA1_BLOCK_SIZE - partial;
-		memcpy(sctx->buffer + partial, data, done);
-		sha1_transform_asm(sctx->state, sctx->buffer, 1);
-	}
-
-	if (len - done >= SHA1_BLOCK_SIZE) {
-		const unsigned int rounds = (len - done) / SHA1_BLOCK_SIZE;
-
-		sha1_transform_asm(sctx->state, data + done, rounds);
-		done += rounds * SHA1_BLOCK_SIZE;
-	}
-
-	memcpy(sctx->buffer, data + done, len - done);
-
-	return 0;
-}
+static void (*sha1_transform_asm)(u32 *, const char *, unsigned int);
 
 static int sha1_ssse3_update(struct shash_desc *desc, const u8 *data,
 			     unsigned int len)
 {
 	struct sha1_state *sctx = shash_desc_ctx(desc);
-	unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;
-	int res;
 
-	/* Handle the fast case right here */
-	if (partial + len < SHA1_BLOCK_SIZE) {
-		sctx->count += len;
-		memcpy(sctx->buffer + partial, data, len);
+	if (!irq_fpu_usable() ||
+	    (sctx->count % SHA1_BLOCK_SIZE) + len < SHA1_BLOCK_SIZE)
+		return crypto_sha1_update(desc, data, len);
 
-		return 0;
-	}
+	/* make sure casting to sha1_block_fn() is safe */
+	BUILD_BUG_ON(offsetof(struct sha1_state, state) != 0);
 
-	if (!irq_fpu_usable()) {
-		res = crypto_sha1_update(desc, data, len);
-	} else {
-		kernel_fpu_begin();
-		res = __sha1_ssse3_update(desc, data, len, partial);
-		kernel_fpu_end();
-	}
-
-	return res;
-}
-
-
-/* Add padding and return the message digest. */
-static int sha1_ssse3_final(struct shash_desc *desc, u8 *out)
-{
-	struct sha1_state *sctx = shash_desc_ctx(desc);
-	unsigned int i, index, padlen;
-	__be32 *dst = (__be32 *)out;
-	__be64 bits;
-	static const u8 padding[SHA1_BLOCK_SIZE] = { 0x80, };
-
-	bits = cpu_to_be64(sctx->count << 3);
-
-	/* Pad out to 56 mod 64 and append length */
-	index = sctx->count % SHA1_BLOCK_SIZE;
-	padlen = (index < 56) ? (56 - index) : ((SHA1_BLOCK_SIZE+56) - index);
-	if (!irq_fpu_usable()) {
-		crypto_sha1_update(desc, padding, padlen);
-		crypto_sha1_update(desc, (const u8 *)&bits, sizeof(bits));
-	} else {
-		kernel_fpu_begin();
-		/* We need to fill a whole block for __sha1_ssse3_update() */
-		if (padlen <= 56) {
-			sctx->count += padlen;
-			memcpy(sctx->buffer + index, padding, padlen);
-		} else {
-			__sha1_ssse3_update(desc, padding, padlen, index);
-		}
-		__sha1_ssse3_update(desc, (const u8 *)&bits, sizeof(bits), 56);
-		kernel_fpu_end();
-	}
-
-	/* Store state in digest */
-	for (i = 0; i < 5; i++)
-		dst[i] = cpu_to_be32(sctx->state[i]);
-
-	/* Wipe context */
-	memset(sctx, 0, sizeof(*sctx));
+	kernel_fpu_begin();
+	sha1_base_do_update(desc, data, len,
+			    (sha1_block_fn *)sha1_transform_asm);
+	kernel_fpu_end();
 
 	return 0;
 }
 
-static int sha1_ssse3_export(struct shash_desc *desc, void *out)
+static int sha1_ssse3_finup(struct shash_desc *desc, const u8 *data,
+			    unsigned int len, u8 *out)
 {
-	struct sha1_state *sctx = shash_desc_ctx(desc);
+	if (!irq_fpu_usable())
+		return crypto_sha1_finup(desc, data, len, out);
 
-	memcpy(out, sctx, sizeof(*sctx));
+	kernel_fpu_begin();
+	if (len)
+		sha1_base_do_update(desc, data, len,
+				    (sha1_block_fn *)sha1_transform_asm);
+	sha1_base_do_finalize(desc, (sha1_block_fn *)sha1_transform_asm);
+	kernel_fpu_end();
 
-	return 0;
+	return sha1_base_finish(desc, out);
 }
 
-static int sha1_ssse3_import(struct shash_desc *desc, const void *in)
+/* Add padding and return the message digest. */
+static int sha1_ssse3_final(struct shash_desc *desc, u8 *out)
 {
-	struct sha1_state *sctx = shash_desc_ctx(desc);
-
-	memcpy(sctx, in, sizeof(*sctx));
-
-	return 0;
+	return sha1_ssse3_finup(desc, NULL, 0, out);
 }
 
 #ifdef CONFIG_AS_AVX2
@@ -186,13 +105,11 @@ static void sha1_apply_transform_avx2(u32 *digest, const char *data,
 
 static struct shash_alg alg = {
 	.digestsize	=	SHA1_DIGEST_SIZE,
-	.init		=	sha1_ssse3_init,
+	.init		=	sha1_base_init,
 	.update		=	sha1_ssse3_update,
 	.final		=	sha1_ssse3_final,
-	.export		=	sha1_ssse3_export,
-	.import		=	sha1_ssse3_import,
+	.finup		=	sha1_ssse3_finup,
 	.descsize	=	sizeof(struct sha1_state),
-	.statesize	=	sizeof(struct sha1_state),
 	.base		=	{
 		.cra_name	= "sha1",
 		.cra_driver_name= "sha1-ssse3",
diff --git a/arch/x86/crypto/sha256-avx-asm.S b/arch/x86/crypto/sha256-avx-asm.S
index 642f15687a0a..92b3b5d75ba9 100644
--- a/arch/x86/crypto/sha256-avx-asm.S
+++ b/arch/x86/crypto/sha256-avx-asm.S
@@ -96,10 +96,10 @@ SHUF_DC00 = %xmm12 # shuffle xDxC -> DC00
 BYTE_FLIP_MASK = %xmm13
 
 NUM_BLKS = %rdx	# 3rd arg
-CTX = %rsi	# 2nd arg
-INP = %rdi	# 1st arg
+INP = %rsi	# 2nd arg
+CTX = %rdi	# 1st arg
 
-SRND = %rdi	# clobbers INP
+SRND = %rsi	# clobbers INP
 c = %ecx
 d = %r8d
 e = %edx
@@ -342,8 +342,8 @@ a = TMP_
 
 ########################################################################
 ## void sha256_transform_avx(void *input_data, UINT32 digest[8], UINT64 num_blks)
-## arg 1 : pointer to input data
-## arg 2 : pointer to digest
+## arg 1 : pointer to digest
+## arg 2 : pointer to input data
 ## arg 3 : Num blocks
 ########################################################################
 .text
diff --git a/arch/x86/crypto/sha256-avx2-asm.S b/arch/x86/crypto/sha256-avx2-asm.S
index 9e86944c539d..570ec5ec62d7 100644
--- a/arch/x86/crypto/sha256-avx2-asm.S
+++ b/arch/x86/crypto/sha256-avx2-asm.S
@@ -91,12 +91,12 @@ BYTE_FLIP_MASK = %ymm13
 X_BYTE_FLIP_MASK = %xmm13 # XMM version of BYTE_FLIP_MASK
 
 NUM_BLKS = %rdx	# 3rd arg
-CTX = %rsi	# 2nd arg
-INP = %rdi	# 1st arg
+INP = %rsi	# 2nd arg
+CTX = %rdi	# 1st arg
 c = %ecx
 d = %r8d
 e = %edx	# clobbers NUM_BLKS
-y3 = %edi	# clobbers INP
+y3 = %esi	# clobbers INP
 
 
 TBL = %rbp
@@ -523,8 +523,8 @@ STACK_SIZE = _RSP + _RSP_SIZE
 
 ########################################################################
 ## void sha256_transform_rorx(void *input_data, UINT32 digest[8], UINT64 num_blks)
-## arg 1 : pointer to input data
-## arg 2 : pointer to digest
+## arg 1 : pointer to digest
+## arg 2 : pointer to input data
 ## arg 3 : Num blocks
 ########################################################################
 .text
diff --git a/arch/x86/crypto/sha256-ssse3-asm.S b/arch/x86/crypto/sha256-ssse3-asm.S
index f833b74d902b..2cedc44e8121 100644
--- a/arch/x86/crypto/sha256-ssse3-asm.S
+++ b/arch/x86/crypto/sha256-ssse3-asm.S
@@ -88,10 +88,10 @@ SHUF_DC00 = %xmm11 # shuffle xDxC -> DC00
 BYTE_FLIP_MASK = %xmm12
 
 NUM_BLKS = %rdx	# 3rd arg
-CTX = %rsi	# 2nd arg
-INP = %rdi	# 1st arg
+INP = %rsi	# 2nd arg
+CTX = %rdi	# 1st arg
 
-SRND = %rdi	# clobbers INP
+SRND = %rsi	# clobbers INP
 c = %ecx
 d = %r8d
 e = %edx
@@ -348,8 +348,8 @@ a = TMP_
 
 ########################################################################
 ## void sha256_transform_ssse3(void *input_data, UINT32 digest[8], UINT64 num_blks)
-## arg 1 : pointer to input data
-## arg 2 : pointer to digest
+## arg 1 : pointer to digest
+## arg 2 : pointer to input data
 ## arg 3 : Num blocks
 ########################################################################
 .text
diff --git a/arch/x86/crypto/sha256_ssse3_glue.c b/arch/x86/crypto/sha256_ssse3_glue.c
index 8fad72f4dfd2..ccc338881ee8 100644
--- a/arch/x86/crypto/sha256_ssse3_glue.c
+++ b/arch/x86/crypto/sha256_ssse3_glue.c
@@ -36,195 +36,74 @@
 #include <linux/cryptohash.h>
 #include <linux/types.h>
 #include <crypto/sha.h>
-#include <asm/byteorder.h>
+#include <crypto/sha256_base.h>
 #include <asm/i387.h>
 #include <asm/xcr.h>
 #include <asm/xsave.h>
 #include <linux/string.h>
 
-asmlinkage void sha256_transform_ssse3(const char *data, u32 *digest,
+asmlinkage void sha256_transform_ssse3(u32 *digest, const char *data,
 				       u64 rounds);
 #ifdef CONFIG_AS_AVX
-asmlinkage void sha256_transform_avx(const char *data, u32 *digest,
+asmlinkage void sha256_transform_avx(u32 *digest, const char *data,
 				     u64 rounds);
 #endif
 #ifdef CONFIG_AS_AVX2
-asmlinkage void sha256_transform_rorx(const char *data, u32 *digest,
+asmlinkage void sha256_transform_rorx(u32 *digest, const char *data,
 				      u64 rounds);
 #endif
 
-static asmlinkage void (*sha256_transform_asm)(const char *, u32 *, u64);
-
-
-static int sha256_ssse3_init(struct shash_desc *desc)
-{
-	struct sha256_state *sctx = shash_desc_ctx(desc);
-
-	sctx->state[0] = SHA256_H0;
-	sctx->state[1] = SHA256_H1;
-	sctx->state[2] = SHA256_H2;
-	sctx->state[3] = SHA256_H3;
-	sctx->state[4] = SHA256_H4;
-	sctx->state[5] = SHA256_H5;
-	sctx->state[6] = SHA256_H6;
-	sctx->state[7] = SHA256_H7;
-	sctx->count = 0;
-
-	return 0;
-}
-
-static int __sha256_ssse3_update(struct shash_desc *desc, const u8 *data,
-				 unsigned int len, unsigned int partial)
-{
-	struct sha256_state *sctx = shash_desc_ctx(desc);
-	unsigned int done = 0;
-
-	sctx->count += len;
-
-	if (partial) {
-		done = SHA256_BLOCK_SIZE - partial;
-		memcpy(sctx->buf + partial, data, done);
-		sha256_transform_asm(sctx->buf, sctx->state, 1);
-	}
-
-	if (len - done >= SHA256_BLOCK_SIZE) {
-		const unsigned int rounds = (len - done) / SHA256_BLOCK_SIZE;
-
-		sha256_transform_asm(data + done, sctx->state, (u64) rounds);
-
-		done += rounds * SHA256_BLOCK_SIZE;
-	}
-
-	memcpy(sctx->buf, data + done, len - done);
-
-	return 0;
-}
+static void (*sha256_transform_asm)(u32 *, const char *, u64);
 
 static int sha256_ssse3_update(struct shash_desc *desc, const u8 *data,
 			       unsigned int len)
 {
 	struct sha256_state *sctx = shash_desc_ctx(desc);
-	unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;
-	int res;
 
-	/* Handle the fast case right here */
-	if (partial + len < SHA256_BLOCK_SIZE) {
-		sctx->count += len;
-		memcpy(sctx->buf + partial, data, len);
-
-		return 0;
-	}
-
-	if (!irq_fpu_usable()) {
-		res = crypto_sha256_update(desc, data, len);
-	} else {
-		kernel_fpu_begin();
-		res = __sha256_ssse3_update(desc, data, len, partial);
-		kernel_fpu_end();
-	}
-
-	return res;
-}
-
+	if (!irq_fpu_usable() ||
+	    (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)
+		return crypto_sha256_update(desc, data, len);
+
+	/* make sure casting to sha256_block_fn() is safe */
+	BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);
+
+	kernel_fpu_begin();
+	sha256_base_do_update(desc, data, len,
+			      (sha256_block_fn *)sha256_transform_asm);
+	kernel_fpu_end();
+
+	return 0;
+}
+
+static int sha256_ssse3_finup(struct shash_desc *desc, const u8 *data,
+			      unsigned int len, u8 *out)
+{
+	if (!irq_fpu_usable())
+		return crypto_sha256_finup(desc, data, len, out);
+
+	kernel_fpu_begin();
+	if (len)
+		sha256_base_do_update(desc, data, len,
+				      (sha256_block_fn *)sha256_transform_asm);
+	sha256_base_do_finalize(desc, (sha256_block_fn *)sha256_transform_asm);
+	kernel_fpu_end();
+
+	return sha256_base_finish(desc, out);
+}
 
 /* Add padding and return the message digest. */
 static int sha256_ssse3_final(struct shash_desc *desc, u8 *out)
 {
-	struct sha256_state *sctx = shash_desc_ctx(desc);
-	unsigned int i, index, padlen;
-	__be32 *dst = (__be32 *)out;
-	__be64 bits;
-	static const u8 padding[SHA256_BLOCK_SIZE] = { 0x80, };
-
-	bits = cpu_to_be64(sctx->count << 3);
-
-	/* Pad out to 56 mod 64 and append length */
-	index = sctx->count % SHA256_BLOCK_SIZE;
-	padlen = (index < 56) ? (56 - index) : ((SHA256_BLOCK_SIZE+56)-index);
-
-	if (!irq_fpu_usable()) {
-		crypto_sha256_update(desc, padding, padlen);
-		crypto_sha256_update(desc, (const u8 *)&bits, sizeof(bits));
-	} else {
-		kernel_fpu_begin();
-		/* We need to fill a whole block for __sha256_ssse3_update() */
-		if (padlen <= 56) {
-			sctx->count += padlen;
-			memcpy(sctx->buf + index, padding, padlen);
-		} else {
-			__sha256_ssse3_update(desc, padding, padlen, index);
-		}
-		__sha256_ssse3_update(desc, (const u8 *)&bits,
-				      sizeof(bits), 56);
-		kernel_fpu_end();
-	}
-
-	/* Store state in digest */
-	for (i = 0; i < 8; i++)
-		dst[i] = cpu_to_be32(sctx->state[i]);
-
-	/* Wipe context */
-	memset(sctx, 0, sizeof(*sctx));
-
-	return 0;
-}
-
-static int sha256_ssse3_export(struct shash_desc *desc, void *out)
-{
-	struct sha256_state *sctx = shash_desc_ctx(desc);
-
-	memcpy(out, sctx, sizeof(*sctx));
-
-	return 0;
-}
-
-static int sha256_ssse3_import(struct shash_desc *desc, const void *in)
-{
-	struct sha256_state *sctx = shash_desc_ctx(desc);
-
-	memcpy(sctx, in, sizeof(*sctx));
-
-	return 0;
-}
-
-static int sha224_ssse3_init(struct shash_desc *desc)
-{
-	struct sha256_state *sctx = shash_desc_ctx(desc);
-
-	sctx->state[0] = SHA224_H0;
-	sctx->state[1] = SHA224_H1;
-	sctx->state[2] = SHA224_H2;
-	sctx->state[3] = SHA224_H3;
-	sctx->state[4] = SHA224_H4;
-	sctx->state[5] = SHA224_H5;
-	sctx->state[6] = SHA224_H6;
-	sctx->state[7] = SHA224_H7;
-	sctx->count = 0;
-
-	return 0;
-}
-
-static int sha224_ssse3_final(struct shash_desc *desc, u8 *hash)
-{
-	u8 D[SHA256_DIGEST_SIZE];
-
-	sha256_ssse3_final(desc, D);
-
-	memcpy(hash, D, SHA224_DIGEST_SIZE);
-	memzero_explicit(D, SHA256_DIGEST_SIZE);
-
-	return 0;
+	return sha256_ssse3_finup(desc, NULL, 0, out);
 }
 
 static struct shash_alg algs[] = { {
 	.digestsize	=	SHA256_DIGEST_SIZE,
-	.init		=	sha256_ssse3_init,
+	.init		=	sha256_base_init,
 	.update		=	sha256_ssse3_update,
 	.final		=	sha256_ssse3_final,
-	.export		=	sha256_ssse3_export,
-	.import		=	sha256_ssse3_import,
+	.finup		=	sha256_ssse3_finup,
 	.descsize	=	sizeof(struct sha256_state),
-	.statesize	=	sizeof(struct sha256_state),
 	.base		=	{
 		.cra_name	=	"sha256",
 		.cra_driver_name =	"sha256-ssse3",
@@ -235,13 +114,11 @@ static struct shash_alg algs[] = { {
 	}
 }, {
 	.digestsize	=	SHA224_DIGEST_SIZE,
-	.init		=	sha224_ssse3_init,
+	.init		=	sha224_base_init,
 	.update		=	sha256_ssse3_update,
-	.final		=	sha224_ssse3_final,
-	.export		=	sha256_ssse3_export,
-	.import		=	sha256_ssse3_import,
+	.final		=	sha256_ssse3_final,
+	.finup		=	sha256_ssse3_finup,
 	.descsize	=	sizeof(struct sha256_state),
-	.statesize	=	sizeof(struct sha256_state),
 	.base		=	{
 		.cra_name	=	"sha224",
 		.cra_driver_name =	"sha224-ssse3",
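The BUILD_BUG_ON in the rewritten update path is what makes the
(sha256_block_fn *) cast defensible: the asm routines take a bare u32 *digest,
while the base layer hands them a struct sha256_state *, and the two pointers
only alias if the state array is the struct's first member. A standalone
sketch of that invariant (simplified stand-in types, not the kernel headers):

	#include <stddef.h>
	#include <stdint.h>

	/* simplified stand-in for struct sha256_state (sketch only) */
	struct sha256_state_sketch {
		uint32_t state[8];	/* must stay the first member */
		uint64_t count;
		uint8_t  buf[64];
	};

	/* A pointer to the struct and a pointer to its first member have
	 * the same address, so a transform expecting the digest words
	 * still lands on them; the compile-time check pins this down. */
	_Static_assert(offsetof(struct sha256_state_sketch, state) == 0,
		       "state must sit at offset zero");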
diff --git a/arch/x86/crypto/sha512-avx-asm.S b/arch/x86/crypto/sha512-avx-asm.S
index 974dde9bc6cd..565274d6a641 100644
--- a/arch/x86/crypto/sha512-avx-asm.S
+++ b/arch/x86/crypto/sha512-avx-asm.S
@@ -54,9 +54,9 @@
 
 # Virtual Registers
 # ARG1
-msg = %rdi
+digest = %rdi
 # ARG2
-digest = %rsi
+msg = %rsi
 # ARG3
 msglen = %rdx
 T1 = %rcx
@@ -271,7 +271,7 @@ frame_size = frame_GPRSAVE + GPRSAVE_SIZE
 .endm
 
 ########################################################################
-# void sha512_transform_avx(const void* M, void* D, u64 L)
+# void sha512_transform_avx(void* D, const void* M, u64 L)
 # Purpose: Updates the SHA512 digest stored at D with the message stored in M.
 # The size of the message pointed to by M must be an integer multiple of SHA512
 # message blocks.
diff --git a/arch/x86/crypto/sha512-avx2-asm.S b/arch/x86/crypto/sha512-avx2-asm.S
index 568b96105f5c..a4771dcd1fcf 100644
--- a/arch/x86/crypto/sha512-avx2-asm.S
+++ b/arch/x86/crypto/sha512-avx2-asm.S
@@ -70,9 +70,9 @@ XFER = YTMP0
 BYTE_FLIP_MASK = %ymm9
 
 # 1st arg
-INP = %rdi
+CTX = %rdi
 # 2nd arg
-CTX = %rsi
+INP = %rsi
 # 3rd arg
 NUM_BLKS = %rdx
 
@@ -562,7 +562,7 @@ frame_size = frame_GPRSAVE + GPRSAVE_SIZE
 .endm
 
 ########################################################################
-# void sha512_transform_rorx(const void* M, void* D, uint64_t L)#
+# void sha512_transform_rorx(void* D, const void* M, uint64_t L)#
 # Purpose: Updates the SHA512 digest stored at D with the message stored in M.
 # The size of the message pointed to by M must be an integer multiple of SHA512
 # message blocks.
diff --git a/arch/x86/crypto/sha512-ssse3-asm.S b/arch/x86/crypto/sha512-ssse3-asm.S
index fb56855d51f5..e610e29cbc81 100644
--- a/arch/x86/crypto/sha512-ssse3-asm.S
+++ b/arch/x86/crypto/sha512-ssse3-asm.S
@@ -53,9 +53,9 @@
 
 # Virtual Registers
 # ARG1
-msg = %rdi
+digest = %rdi
 # ARG2
-digest = %rsi
+msg = %rsi
 # ARG3
 msglen = %rdx
 T1 = %rcx
@@ -269,7 +269,7 @@ frame_size = frame_GPRSAVE + GPRSAVE_SIZE
 .endm
 
 ########################################################################
-# void sha512_transform_ssse3(const void* M, void* D, u64 L)#
+# void sha512_transform_ssse3(void* D, const void* M, u64 L)#
 # Purpose: Updates the SHA512 digest stored at D with the message stored in M.
 # The size of the message pointed to by M must be an integer multiple of SHA512
 # message blocks.
diff --git a/arch/x86/crypto/sha512_ssse3_glue.c b/arch/x86/crypto/sha512_ssse3_glue.c
index 0b6af26832bf..d9fa4c1e063f 100644
--- a/arch/x86/crypto/sha512_ssse3_glue.c
+++ b/arch/x86/crypto/sha512_ssse3_glue.c
@@ -34,205 +34,75 @@
 #include <linux/cryptohash.h>
 #include <linux/types.h>
 #include <crypto/sha.h>
-#include <asm/byteorder.h>
+#include <crypto/sha512_base.h>
 #include <asm/i387.h>
 #include <asm/xcr.h>
 #include <asm/xsave.h>
 
 #include <linux/string.h>
 
-asmlinkage void sha512_transform_ssse3(const char *data, u64 *digest,
+asmlinkage void sha512_transform_ssse3(u64 *digest, const char *data,
 				       u64 rounds);
 #ifdef CONFIG_AS_AVX
-asmlinkage void sha512_transform_avx(const char *data, u64 *digest,
+asmlinkage void sha512_transform_avx(u64 *digest, const char *data,
 				     u64 rounds);
 #endif
 #ifdef CONFIG_AS_AVX2
-asmlinkage void sha512_transform_rorx(const char *data, u64 *digest,
+asmlinkage void sha512_transform_rorx(u64 *digest, const char *data,
 				      u64 rounds);
 #endif
 
-static asmlinkage void (*sha512_transform_asm)(const char *, u64 *, u64);
-
-
-static int sha512_ssse3_init(struct shash_desc *desc)
-{
-	struct sha512_state *sctx = shash_desc_ctx(desc);
-
-	sctx->state[0] = SHA512_H0;
-	sctx->state[1] = SHA512_H1;
-	sctx->state[2] = SHA512_H2;
-	sctx->state[3] = SHA512_H3;
-	sctx->state[4] = SHA512_H4;
-	sctx->state[5] = SHA512_H5;
-	sctx->state[6] = SHA512_H6;
-	sctx->state[7] = SHA512_H7;
-	sctx->count[0] = sctx->count[1] = 0;
-
-	return 0;
-}
-
-static int __sha512_ssse3_update(struct shash_desc *desc, const u8 *data,
-				 unsigned int len, unsigned int partial)
-{
-	struct sha512_state *sctx = shash_desc_ctx(desc);
-	unsigned int done = 0;
-
-	sctx->count[0] += len;
-	if (sctx->count[0] < len)
-		sctx->count[1]++;
-
-	if (partial) {
-		done = SHA512_BLOCK_SIZE - partial;
-		memcpy(sctx->buf + partial, data, done);
-		sha512_transform_asm(sctx->buf, sctx->state, 1);
-	}
-
-	if (len - done >= SHA512_BLOCK_SIZE) {
-		const unsigned int rounds = (len - done) / SHA512_BLOCK_SIZE;
-
-		sha512_transform_asm(data + done, sctx->state, (u64) rounds);
-
-		done += rounds * SHA512_BLOCK_SIZE;
-	}
-
-	memcpy(sctx->buf, data + done, len - done);
-
-	return 0;
-}
+static void (*sha512_transform_asm)(u64 *, const char *, u64);
 
 static int sha512_ssse3_update(struct shash_desc *desc, const u8 *data,
 			       unsigned int len)
 {
 	struct sha512_state *sctx = shash_desc_ctx(desc);
-	unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE;
-	int res;
 
-	/* Handle the fast case right here */
-	if (partial + len < SHA512_BLOCK_SIZE) {
-		sctx->count[0] += len;
-		if (sctx->count[0] < len)
-			sctx->count[1]++;
-		memcpy(sctx->buf + partial, data, len);
-
-		return 0;
-	}
-
-	if (!irq_fpu_usable()) {
-		res = crypto_sha512_update(desc, data, len);
-	} else {
-		kernel_fpu_begin();
-		res = __sha512_ssse3_update(desc, data, len, partial);
-		kernel_fpu_end();
-	}
-
-	return res;
-}
-
+	if (!irq_fpu_usable() ||
+	    (sctx->count[0] % SHA512_BLOCK_SIZE) + len < SHA512_BLOCK_SIZE)
+		return crypto_sha512_update(desc, data, len);
+
+	/* make sure casting to sha512_block_fn() is safe */
+	BUILD_BUG_ON(offsetof(struct sha512_state, state) != 0);
+
+	kernel_fpu_begin();
+	sha512_base_do_update(desc, data, len,
+			      (sha512_block_fn *)sha512_transform_asm);
+	kernel_fpu_end();
+
+	return 0;
+}
+
+static int sha512_ssse3_finup(struct shash_desc *desc, const u8 *data,
+			      unsigned int len, u8 *out)
+{
+	if (!irq_fpu_usable())
+		return crypto_sha512_finup(desc, data, len, out);
+
+	kernel_fpu_begin();
+	if (len)
+		sha512_base_do_update(desc, data, len,
+				      (sha512_block_fn *)sha512_transform_asm);
+	sha512_base_do_finalize(desc, (sha512_block_fn *)sha512_transform_asm);
+	kernel_fpu_end();
+
+	return sha512_base_finish(desc, out);
+}
 
 /* Add padding and return the message digest. */
 static int sha512_ssse3_final(struct shash_desc *desc, u8 *out)
 {
-	struct sha512_state *sctx = shash_desc_ctx(desc);
-	unsigned int i, index, padlen;
-	__be64 *dst = (__be64 *)out;
-	__be64 bits[2];
-	static const u8 padding[SHA512_BLOCK_SIZE] = { 0x80, };
-
-	/* save number of bits */
-	bits[1] = cpu_to_be64(sctx->count[0] << 3);
-	bits[0] = cpu_to_be64(sctx->count[1] << 3 | sctx->count[0] >> 61);
-
-	/* Pad out to 112 mod 128 and append length */
-	index = sctx->count[0] & 0x7f;
-	padlen = (index < 112) ? (112 - index) : ((128+112) - index);
-
-	if (!irq_fpu_usable()) {
-		crypto_sha512_update(desc, padding, padlen);
-		crypto_sha512_update(desc, (const u8 *)&bits, sizeof(bits));
-	} else {
-		kernel_fpu_begin();
-		/* We need to fill a whole block for __sha512_ssse3_update() */
-		if (padlen <= 112) {
-			sctx->count[0] += padlen;
-			if (sctx->count[0] < padlen)
-				sctx->count[1]++;
-			memcpy(sctx->buf + index, padding, padlen);
-		} else {
-			__sha512_ssse3_update(desc, padding, padlen, index);
-		}
-		__sha512_ssse3_update(desc, (const u8 *)&bits,
-				      sizeof(bits), 112);
-		kernel_fpu_end();
-	}
-
-	/* Store state in digest */
-	for (i = 0; i < 8; i++)
-		dst[i] = cpu_to_be64(sctx->state[i]);
-
-	/* Wipe context */
-	memset(sctx, 0, sizeof(*sctx));
-
-	return 0;
-}
-
-static int sha512_ssse3_export(struct shash_desc *desc, void *out)
-{
-	struct sha512_state *sctx = shash_desc_ctx(desc);
-
-	memcpy(out, sctx, sizeof(*sctx));
-
-	return 0;
-}
-
-static int sha512_ssse3_import(struct shash_desc *desc, const void *in)
-{
-	struct sha512_state *sctx = shash_desc_ctx(desc);
-
-	memcpy(sctx, in, sizeof(*sctx));
-
-	return 0;
-}
-
-static int sha384_ssse3_init(struct shash_desc *desc)
-{
-	struct sha512_state *sctx = shash_desc_ctx(desc);
-
-	sctx->state[0] = SHA384_H0;
-	sctx->state[1] = SHA384_H1;
-	sctx->state[2] = SHA384_H2;
-	sctx->state[3] = SHA384_H3;
-	sctx->state[4] = SHA384_H4;
-	sctx->state[5] = SHA384_H5;
-	sctx->state[6] = SHA384_H6;
-	sctx->state[7] = SHA384_H7;
-
-	sctx->count[0] = sctx->count[1] = 0;
-
-	return 0;
-}
-
-static int sha384_ssse3_final(struct shash_desc *desc, u8 *hash)
-{
-	u8 D[SHA512_DIGEST_SIZE];
-
-	sha512_ssse3_final(desc, D);
-
-	memcpy(hash, D, SHA384_DIGEST_SIZE);
-	memzero_explicit(D, SHA512_DIGEST_SIZE);
-
-	return 0;
+	return sha512_ssse3_finup(desc, NULL, 0, out);
 }
 
 static struct shash_alg algs[] = { {
 	.digestsize	=	SHA512_DIGEST_SIZE,
-	.init		=	sha512_ssse3_init,
+	.init		=	sha512_base_init,
 	.update		=	sha512_ssse3_update,
 	.final		=	sha512_ssse3_final,
-	.export		=	sha512_ssse3_export,
-	.import		=	sha512_ssse3_import,
+	.finup		=	sha512_ssse3_finup,
 	.descsize	=	sizeof(struct sha512_state),
-	.statesize	=	sizeof(struct sha512_state),
 	.base		=	{
 		.cra_name	=	"sha512",
 		.cra_driver_name =	"sha512-ssse3",
@@ -243,13 +113,11 @@ static struct shash_alg algs[] = { {
 	}
 }, {
 	.digestsize	=	SHA384_DIGEST_SIZE,
-	.init		=	sha384_ssse3_init,
+	.init		=	sha384_base_init,
 	.update		=	sha512_ssse3_update,
-	.final		=	sha384_ssse3_final,
-	.export		=	sha512_ssse3_export,
-	.import		=	sha512_ssse3_import,
+	.final		=	sha512_ssse3_final,
+	.finup		=	sha512_ssse3_finup,
 	.descsize	=	sizeof(struct sha512_state),
-	.statesize	=	sizeof(struct sha512_state),
 	.base		=	{
 		.cra_name	=	"sha384",
 		.cra_driver_name =	"sha384-ssse3",
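The SHA-512 conversion mirrors the SHA-256 one, with one wrinkle: the byte
count is 128 bits wide, split across count[0] (low word) and count[1] (high
word). Testing only count[0] in the partial-block check stays exact because
the block size divides 2^64, so the full 128-bit count and its low word leave
the same remainder modulo 128. A small user-space illustration of that
arithmetic (names are stand-ins, not kernel code):

	#include <stdint.h>

	/* (hi * 2^64 + lo) % 128 == lo % 128, since 128 divides 2^64 */
	static inline unsigned int sha512_partial(uint64_t count_lo)
	{
		return (unsigned int)(count_lo % 128);
	}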
diff --git a/arch/x86/crypto/twofish_avx_glue.c b/arch/x86/crypto/twofish_avx_glue.c
index 1ac531ea9bcc..b5e2d5651851 100644
--- a/arch/x86/crypto/twofish_avx_glue.c
+++ b/arch/x86/crypto/twofish_avx_glue.c
@@ -340,7 +340,8 @@ static struct crypto_alg twofish_algs[10] = { {
 	.cra_name		= "__ecb-twofish-avx",
 	.cra_driver_name	= "__driver-ecb-twofish-avx",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= TF_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct twofish_ctx),
 	.cra_alignmask		= 0,
@@ -359,7 +360,8 @@ static struct crypto_alg twofish_algs[10] = { {
 	.cra_name		= "__cbc-twofish-avx",
 	.cra_driver_name	= "__driver-cbc-twofish-avx",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= TF_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct twofish_ctx),
 	.cra_alignmask		= 0,
@@ -378,7 +380,8 @@ static struct crypto_alg twofish_algs[10] = { {
 	.cra_name		= "__ctr-twofish-avx",
 	.cra_driver_name	= "__driver-ctr-twofish-avx",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= 1,
 	.cra_ctxsize		= sizeof(struct twofish_ctx),
 	.cra_alignmask		= 0,
@@ -398,7 +401,8 @@ static struct crypto_alg twofish_algs[10] = { {
 	.cra_name		= "__lrw-twofish-avx",
 	.cra_driver_name	= "__driver-lrw-twofish-avx",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= TF_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct twofish_lrw_ctx),
 	.cra_alignmask		= 0,
@@ -421,7 +425,8 @@ static struct crypto_alg twofish_algs[10] = { {
 	.cra_name		= "__xts-twofish-avx",
 	.cra_driver_name	= "__driver-xts-twofish-avx",
 	.cra_priority		= 0,
-	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
+	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
+				  CRYPTO_ALG_INTERNAL,
 	.cra_blocksize		= TF_BLOCK_SIZE,
 	.cra_ctxsize		= sizeof(struct twofish_xts_ctx),
 	.cra_alignmask		= 0,
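Tagging the "__driver-..." helper ciphers with CRYPTO_ALG_INTERNAL keeps them
out of any algorithm lookup that does not request internal implementations
explicitly, which closes them off from user space (e.g. via AF_ALG) while
in-kernel users keep working by setting the flag in both type and mask. A
sketch of the kernel-side allocation, assuming the 4.1-era ablk_helper
convention (error handling omitted):

	/* explicitly request the internal helper implementation */
	struct crypto_ablkcipher *child =
		crypto_alloc_ablkcipher("__driver-ecb-twofish-avx",
					CRYPTO_ALG_INTERNAL,
					CRYPTO_ALG_INTERNAL);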