author		Mathias Krause <minipli@googlemail.com>	2014-03-24 12:10:38 -0400
committer	Herbert Xu <herbert@gondor.apana.org.au>	2014-03-25 08:25:43 -0400
commit		6c8c17cc7a8806dde074d7c0bf4d519dd4d028c5 (patch)
tree		d391a0579378dbdd4de40f441bb94d0e579811ff
parent		6ca5afb8c26991cf4f13a8bcca870ec2a9522bf7 (diff)
crypto: x86/sha1 - fix stack alignment of AVX2 variant
The AVX2 implementation might waste up to a page of stack memory because
of a wrong alignment calculation. This will, in the worst case, increase
the stack usage of sha1_transform_avx2() alone to 5.4 kB -- way too big
for a kernel function. Even worse, it might also allocate *fewer* bytes
than needed if the stack pointer is already aligned, because in that case
the 'sub %rbx, %rsp' is effectively moving the stack pointer upwards,
not downwards.
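A minimal C sketch, not part of the patch, modeling the old prologue
arithmetic ('and $(0x1000-1), %rbx; sub $(8+32), %rbx; sub %rbx, %rsp');
the sample addresses are made up for illustration:

#include <stdint.h>
#include <stdio.h>

/* Signed adjustment the old code ends up subtracting from %rsp:
 *   (rsp & (0x1000-1)) - (8+32)
 */
static long old_adjust(uintptr_t rsp)
{
	return (long)(rsp & (0x1000 - 1)) - (8 + 32);
}

int main(void)
{
	/* Near the end of a page, almost 4 kB of stack are skipped: */
	printf("%ld\n", old_adjust(0x7ffffffff000UL - 8));	/* 4048 */

	/* On a page-aligned pointer the adjustment is negative, so
	 * 'sub %rbx, %rsp' moves the stack pointer UP by 40 bytes: */
	printf("%ld\n", old_adjust(0x7ffffffff000UL));		/* -40 */
	return 0;
}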
Fix both issues by simplifying the alignment calculation to use the
32 byte alignment that is actually needed.
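For comparison, an illustrative C sketch (again not patch code) of the
new calculation: masking with ~(0x20-1) rounds %rsp down to the next
32 byte boundary, so it never moves upwards and skips at most 31 bytes:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uintptr_t rsp;

	/* rsp & ~(0x20-1) drops the low five bits: */
	for (rsp = 0x7ffffffff000UL; rsp < 0x7ffffffff040UL; rsp += 8)
		printf("%#lx -> %#lx (down %lu bytes)\n",
		       (unsigned long)rsp,
		       (unsigned long)(rsp & ~(uintptr_t)(0x20 - 1)),
		       (unsigned long)(rsp & (0x20 - 1)));
	return 0;
}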
Cc: Chandramouli Narayanan <mouli@linux.intel.com>
Signed-off-by: Mathias Krause <minipli@googlemail.com>
Reviewed-by: H. Peter Anvin <hpa@linux.intel.com>
Reviewed-by: Marek Vasut <marex@denx.de>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
 arch/x86/crypto/sha1_avx2_x86_64_asm.S | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)
diff --git a/arch/x86/crypto/sha1_avx2_x86_64_asm.S b/arch/x86/crypto/sha1_avx2_x86_64_asm.S
index 4f348544d132..bacac22b20c2 100644
--- a/arch/x86/crypto/sha1_avx2_x86_64_asm.S
+++ b/arch/x86/crypto/sha1_avx2_x86_64_asm.S
@@ -636,9 +636,7 @@ _loop3:
 
 	/* Align stack */
 	mov	%rsp, %rbx
-	and	$(0x1000-1), %rbx
-	sub	$(8+32), %rbx
-	sub	%rbx, %rsp
+	and	$~(0x20-1), %rsp
 	push	%rbx
 	sub	$RESERVE_STACK, %rsp
 
@@ -665,8 +663,7 @@ _loop3:
 	avx2_zeroupper
 
 	add	$RESERVE_STACK, %rsp
-	pop	%rbx
-	add	%rbx, %rsp
+	pop	%rsp
 
 	pop	%r15
 	pop	%r14
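The rewrite also shortens the epilogue: since %rbx (holding the
pre-alignment %rsp) is pushed onto the newly aligned stack, a single
'pop %rsp' restores the old stack pointer. A toy C model of that
save/restore idea, with a one-slot array standing in for stack memory
and a made-up incoming address:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uintptr_t stack[1];			/* stand-in for real stack memory */
	uintptr_t rsp = 0x7ffffffff123UL;	/* made-up incoming %rsp */
	uintptr_t rbx = rsp;			/* mov  %rsp, %rbx */

	rsp &= ~(uintptr_t)(0x20 - 1);		/* and  $~(0x20-1), %rsp */
	stack[0] = rbx;				/* push %rbx (saved on aligned stack) */
	/* ... sub $RESERVE_STACK, %rsp; hash rounds; add $RESERVE_STACK ... */
	rsp = stack[0];				/* pop  %rsp: restores pre-alignment value */

	printf("restored %#lx\n", (unsigned long)rsp);
	return 0;
}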