author	Ard Biesheuvel <ard.biesheuvel@linaro.org>	2018-01-19 07:04:33 -0500
committer	Herbert Xu <herbert@gondor.apana.org.au>	2018-01-25 09:10:32 -0500
commit	c013cee99d5a18aec8c71fee8f5f41369cd12595 (patch)
tree	3a8ba8b3d6e8d687935633a1b80e3dd6578826b2 /crypto
parent	9c674e1e2f9e24fa4392167efe343749008338e0 (diff)
crypto: sha3-generic - fixes for alignment and big endian operation
Ensure that the input is byte swabbed before injecting it into the SHA3 transform. Use the get_unaligned() accessor for this so that we don't perform unaligned access inadvertently on architectures that do not support that.

Cc: <stable@vger.kernel.org>
Fixes: 53964b9ee63b7075 ("crypto: sha3 - Add SHA-3 hash algorithm")
Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
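For illustration only (not part of the patch): a minimal userspace sketch of a byte-wise little-endian 64-bit load with the same effect as the get_unaligned_le64() helper used in the change below. The name load_le64 is made up for this example; it shows why XORing the message through a plain (u64 *) cast is wrong on big-endian CPUs and can fault on strict-alignment ones, while a byte-by-byte load is safe everywhere.

/*
 * Illustrative sketch, not kernel code: assemble a 64-bit word from
 * eight bytes, least-significant byte first, so the result matches a
 * little-endian load regardless of host endianness, and no 64-bit
 * memory access that could be unaligned is ever issued.
 */
#include <stdint.h>
#include <stddef.h>

static uint64_t load_le64(const uint8_t *p)
{
	uint64_t v = 0;
	size_t i;

	for (i = 0; i < 8; i++)
		v |= (uint64_t)p[i] << (8 * i);
	return v;
}

/*
 * Absorbing one lane of the message block then reads
 *	st[i] ^= load_le64(src + 8 * i);
 * which is what get_unaligned_le64(src + 8 * i) achieves in the patch.
 */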
Diffstat (limited to 'crypto')
-rw-r--r--	crypto/sha3_generic.c	5
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/crypto/sha3_generic.c b/crypto/sha3_generic.c
index 7e8ed96236ce..a68be626017c 100644
--- a/crypto/sha3_generic.c
+++ b/crypto/sha3_generic.c
@@ -18,6 +18,7 @@
 #include <linux/types.h>
 #include <crypto/sha3.h>
 #include <asm/byteorder.h>
+#include <asm/unaligned.h>
 
 #define KECCAK_ROUNDS 24
 
@@ -149,7 +150,7 @@ static int sha3_update(struct shash_desc *desc, const u8 *data,
 			unsigned int i;
 
 			for (i = 0; i < sctx->rsizw; i++)
-				sctx->st[i] ^= ((u64 *) src)[i];
+				sctx->st[i] ^= get_unaligned_le64(src + 8 * i);
 			keccakf(sctx->st);
 
 			done += sctx->rsiz;
@@ -174,7 +175,7 @@ static int sha3_final(struct shash_desc *desc, u8 *out)
 	sctx->buf[sctx->rsiz - 1] |= 0x80;
 
 	for (i = 0; i < sctx->rsizw; i++)
-		sctx->st[i] ^= ((u64 *) sctx->buf)[i];
+		sctx->st[i] ^= get_unaligned_le64(sctx->buf + 8 * i);
 
 	keccakf(sctx->st);
 