Diffstat (limited to 'crypto/algapi.c')
-rw-r--r--	crypto/algapi.c	69
1 file changed, 51 insertions(+), 18 deletions(-)
diff --git a/crypto/algapi.c b/crypto/algapi.c
index df939b54b09f..6b52e8f0b95f 100644
--- a/crypto/algapi.c
+++ b/crypto/algapi.c
@@ -356,6 +356,7 @@ int crypto_register_alg(struct crypto_alg *alg)
 	struct crypto_larval *larval;
 	int err;
 
+	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
 	err = crypto_check_alg(alg);
 	if (err)
 		return err;
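
The next hunk lets crypto_inc() take its 32-bit fast path only when unaligned
loads are known to be cheap (CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) or when
the counter pointer happens to be naturally aligned for __be32. A minimal
userspace sketch of that alignment test, assuming C11 _Alignof/_Alignas and a
hypothetical helper name is_aligned_u32 (this is an illustration, not kernel
code):

#include <stdint.h>
#include <stdio.h>

/* A pointer is naturally aligned for a type when its address has no
 * bits set below the type's alignment (always a power of two). */
static int is_aligned_u32(const void *p)
{
	return ((uintptr_t)p & (_Alignof(uint32_t) - 1)) == 0;
}

int main(void)
{
	_Alignas(4) unsigned char buf[8];

	/* Typically prints 1 0 0 0: only buf itself is 4-byte aligned. */
	for (int i = 0; i < 4; i++)
		printf("buf+%d aligned for u32: %d\n", i,
		       is_aligned_u32(buf + i));
	return 0;
}
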
@@ -961,34 +962,66 @@ void crypto_inc(u8 *a, unsigned int size)
 	__be32 *b = (__be32 *)(a + size);
 	u32 c;
 
-	for (; size >= 4; size -= 4) {
-		c = be32_to_cpu(*--b) + 1;
-		*b = cpu_to_be32(c);
-		if (c)
-			return;
-	}
+	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
+	    !((unsigned long)b & (__alignof__(*b) - 1)))
+		for (; size >= 4; size -= 4) {
+			c = be32_to_cpu(*--b) + 1;
+			*b = cpu_to_be32(c);
+			if (c)
+				return;
+		}
 
 	crypto_inc_byte(a, size);
 }
 EXPORT_SYMBOL_GPL(crypto_inc);
 
-static inline void crypto_xor_byte(u8 *a, const u8 *b, unsigned int size)
+void __crypto_xor(u8 *dst, const u8 *src, unsigned int len)
 {
-	for (; size; size--)
-		*a++ ^= *b++;
-}
+	int relalign = 0;
+
+	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
+		int size = sizeof(unsigned long);
+		int d = ((unsigned long)dst ^ (unsigned long)src) & (size - 1);
+
+		relalign = d ? 1 << __ffs(d) : size;
+
+		/*
+		 * If we care about alignment, process as many bytes as
+		 * needed to advance dst and src to values whose alignments
+		 * equal their relative alignment. This will allow us to
+		 * process the remainder of the input using optimal strides.
+		 */
+		while (((unsigned long)dst & (relalign - 1)) && len > 0) {
+			*dst++ ^= *src++;
+			len--;
+		}
+	}
 
-void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
-{
-	u32 *a = (u32 *)dst;
-	u32 *b = (u32 *)src;
+	while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
+		*(u64 *)dst ^= *(u64 *)src;
+		dst += 8;
+		src += 8;
+		len -= 8;
+	}
 
-	for (; size >= 4; size -= 4)
-		*a++ ^= *b++;
+	while (len >= 4 && !(relalign & 3)) {
+		*(u32 *)dst ^= *(u32 *)src;
+		dst += 4;
+		src += 4;
+		len -= 4;
+	}
+
+	while (len >= 2 && !(relalign & 1)) {
+		*(u16 *)dst ^= *(u16 *)src;
+		dst += 2;
+		src += 2;
+		len -= 2;
+	}
 
-	crypto_xor_byte((u8 *)a, (u8 *)b, size);
+	while (len--)
+		*dst++ ^= *src++;
 }
-EXPORT_SYMBOL_GPL(crypto_xor);
+EXPORT_SYMBOL_GPL(__crypto_xor);
 
 unsigned int crypto_alg_extsize(struct crypto_alg *alg)
 {
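
For reference, a self-contained userspace model of the relative-alignment
technique __crypto_xor uses above. The name xor_bytes is hypothetical, the
kernel's 8/4/2-byte stride ladder is collapsed into a single word-size stride
for brevity, and memcpy stands in for the raw wide loads the kernel can afford
after its IS_ENABLED checks:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* relalign is the largest power of two dividing the distance between
 * dst and src (capped at the word size): once dst is advanced to a
 * multiple of it, src is too, so both sides can move in that stride. */
static void xor_bytes(unsigned char *dst, const unsigned char *src, size_t len)
{
	size_t size = sizeof(unsigned long);
	size_t d = ((uintptr_t)dst ^ (uintptr_t)src) & (size - 1);
	size_t relalign = d ? (d & -d) : size;	/* d & -d == 1 << __ffs(d) */

	/* Head: byte-wise until dst (and hence src) hits the stride. */
	while (((uintptr_t)dst & (relalign - 1)) && len) {
		*dst++ ^= *src++;
		len--;
	}

	/* Main: word-at-a-time, only when both sides are mutually
	 * word-aligned; memcpy avoids strict-aliasing and unaligned-access
	 * pitfalls that the kernel's casts rely on its checks to rule out. */
	while (len >= size && !(relalign & (size - 1))) {
		unsigned long a, b;

		memcpy(&a, dst, size);
		memcpy(&b, src, size);
		a ^= b;
		memcpy(dst, &a, size);
		dst += size;
		src += size;
		len -= size;
	}

	/* Tail: whatever bytes remain. */
	while (len--)
		*dst++ ^= *src++;
}

int main(void)
{
	unsigned char a[16], b[16];

	for (int i = 0; i < 16; i++) {
		a[i] = i;
		b[i] = 0xff;
	}
	xor_bytes(a + 1, b + 3, 12);	/* correct for any relative alignment */
	for (int i = 0; i < 16; i++)
		printf("%02x ", a[i]);
	printf("\n");
	return 0;
}

Unlike the kernel version, this model always takes the alignment-aware path;
the kernel skips the relalign computation entirely when
CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS is set and dereferences at the widest
stride from the start.
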