author		Eric Biggers <ebiggers@google.com>	2017-11-22 14:51:36 -0500
committer	Herbert Xu <herbert@gondor.apana.org.au>	2017-11-29 01:33:32 -0500
commit		dbd872a123fab81d0fa235a265c39e5ccdf735b3 (patch)
tree		5c69967a0c96c4ae9504d4ee7b6d2c0ddea1373c
parent		ecf3220d882ae84844909ed6323032aac47aff93 (diff)
crypto: chacha20 - Use unaligned access macros when loading key and IV
The generic ChaCha20 implementation has a cra_alignmask of 3, which
ensures that the key passed into crypto_chacha20_setkey() and the IV
passed into crypto_chacha20_init() are 4-byte aligned.

However, these functions are also called from the ARM and ARM64
implementations of ChaCha20, which intentionally do not have a
cra_alignmask set.  This is broken because 32-bit words are being
loaded from potentially-unaligned buffers without the unaligned access
macros.

Fix it by using the unaligned access macros when loading the key and
IV.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Acked-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
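For illustration only, a minimal sketch of the distinction the message
draws (not part of the patch; the two helper names are hypothetical):

	#include <asm/unaligned.h>
	#include <linux/kernel.h>

	/*
	 * Mirrors the old behavior: le32_to_cpup() dereferences a
	 * __le32 * directly, so the pointer must be 4-byte aligned or
	 * the load can fault or misbehave on some architectures.
	 */
	static u32 load_le32_aligned(const void *p)
	{
		return le32_to_cpup(p);
	}

	/*
	 * Mirrors the fixed behavior: get_unaligned_le32() is safe for
	 * any alignment, falling back to byte-wise loads on CPUs that
	 * lack hardware unaligned access support.
	 */
	static u32 load_le32_any_alignment(const u8 *p)
	{
		return get_unaligned_le32(p);
	}

With cra_alignmask of 3 the crypto API guarantees the first form is
safe for the generic driver, but the ARM/ARM64 callers make no such
guarantee, hence the switch to the second form.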
-rw-r--r--	crypto/chacha20_generic.c	16
1 file changed, 6 insertions(+), 10 deletions(-)
diff --git a/crypto/chacha20_generic.c b/crypto/chacha20_generic.c
index ec84e7837aac..b5a10ebf1b82 100644
--- a/crypto/chacha20_generic.c
+++ b/crypto/chacha20_generic.c
@@ -9,16 +9,12 @@
  * (at your option) any later version.
  */
 
+#include <asm/unaligned.h>
 #include <crypto/algapi.h>
 #include <crypto/chacha20.h>
 #include <crypto/internal/skcipher.h>
 #include <linux/module.h>
 
-static inline u32 le32_to_cpuvp(const void *p)
-{
-	return le32_to_cpup(p);
-}
-
 static void chacha20_docrypt(u32 *state, u8 *dst, const u8 *src,
 			     unsigned int bytes)
 {
@@ -53,10 +49,10 @@ void crypto_chacha20_init(u32 *state, struct chacha20_ctx *ctx, u8 *iv)
 	state[9] = ctx->key[5];
 	state[10] = ctx->key[6];
 	state[11] = ctx->key[7];
-	state[12] = le32_to_cpuvp(iv + 0);
-	state[13] = le32_to_cpuvp(iv + 4);
-	state[14] = le32_to_cpuvp(iv + 8);
-	state[15] = le32_to_cpuvp(iv + 12);
+	state[12] = get_unaligned_le32(iv + 0);
+	state[13] = get_unaligned_le32(iv + 4);
+	state[14] = get_unaligned_le32(iv + 8);
+	state[15] = get_unaligned_le32(iv + 12);
 }
 EXPORT_SYMBOL_GPL(crypto_chacha20_init);
 
@@ -70,7 +66,7 @@ int crypto_chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key,
 		return -EINVAL;
 
 	for (i = 0; i < ARRAY_SIZE(ctx->key); i++)
-		ctx->key[i] = le32_to_cpuvp(key + i * sizeof(u32));
+		ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32));
 
 	return 0;
 }