about summary refs log tree commit diff stats
diff options
context:
space:
mode:
author    Borislav Petkov <bp@suse.de>    2016-04-04 16:24:56 -0400
committer Ingo Molnar <mingo@kernel.org>  2016-04-13 05:37:40 -0400
commit    da154e82af4d0c63e2334d5b3822426600b0490f (patch)
tree      78a8d50cf5c45a82d7a274090255a0cedce810ba
parent    1f4dd7938ea575a2d1972e180eaef31e6edb1808 (diff)
x86/cpufeature: Replace cpu_has_avx with boot_cpu_has() usage
Signed-off-by: Borislav Petkov <bp@suse.de>
Cc: Andy Lutomirski <luto@amacapital.net>
Cc: Borislav Petkov <bp@alien8.de>
Cc: Brian Gerst <brgerst@gmail.com>
Cc: Denys Vlasenko <dvlasenk@redhat.com>
Cc: H. Peter Anvin <hpa@zytor.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: linux-crypto@vger.kernel.org
Link: http://lkml.kernel.org/r/1459801503-15600-4-git-send-email-bp@alien8.de
Signed-off-by: Ingo Molnar <mingo@kernel.org>
-rw-r--r--  arch/x86/crypto/aesni-intel_glue.c          2
-rw-r--r--  arch/x86/crypto/camellia_aesni_avx2_glue.c  3
-rw-r--r--  arch/x86/crypto/camellia_aesni_avx_glue.c   2
-rw-r--r--  arch/x86/crypto/chacha20_glue.c             3
-rw-r--r--  arch/x86/crypto/poly1305_glue.c             3
-rw-r--r--  arch/x86/crypto/sha1_ssse3_glue.c           2
-rw-r--r--  arch/x86/crypto/sha256_ssse3_glue.c         2
-rw-r--r--  arch/x86/crypto/sha512_ssse3_glue.c         2
-rw-r--r--  arch/x86/include/asm/cpufeature.h           1
-rw-r--r--  arch/x86/include/asm/xor_avx.h              4
10 files changed, 13 insertions, 11 deletions
diff --git a/arch/x86/crypto/aesni-intel_glue.c b/arch/x86/crypto/aesni-intel_glue.c
index 064c7e2bd7c8..5b7fa1471007 100644
--- a/arch/x86/crypto/aesni-intel_glue.c
+++ b/arch/x86/crypto/aesni-intel_glue.c
@@ -1477,7 +1477,7 @@ static int __init aesni_init(void)
 	}
 	aesni_ctr_enc_tfm = aesni_ctr_enc;
 #ifdef CONFIG_AS_AVX
-	if (cpu_has_avx) {
+	if (boot_cpu_has(X86_FEATURE_AVX)) {
 		/* optimize performance of ctr mode encryption transform */
 		aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
 		pr_info("AES CTR mode by8 optimization enabled\n");
diff --git a/arch/x86/crypto/camellia_aesni_avx2_glue.c b/arch/x86/crypto/camellia_aesni_avx2_glue.c
index c07f699826a0..60907c139c4e 100644
--- a/arch/x86/crypto/camellia_aesni_avx2_glue.c
+++ b/arch/x86/crypto/camellia_aesni_avx2_glue.c
@@ -562,7 +562,8 @@ static int __init camellia_aesni_init(void)
 {
 	const char *feature_name;
 
-	if (!boot_cpu_has(X86_FEATURE_AVX2) || !cpu_has_avx ||
+	if (!boot_cpu_has(X86_FEATURE_AVX) ||
+	    !boot_cpu_has(X86_FEATURE_AVX2) ||
 	    !boot_cpu_has(X86_FEATURE_AES) ||
 	    !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
 		pr_info("AVX2 or AES-NI instructions are not detected.\n");
diff --git a/arch/x86/crypto/camellia_aesni_avx_glue.c b/arch/x86/crypto/camellia_aesni_avx_glue.c
index 6d256d59c5fd..d96429da88eb 100644
--- a/arch/x86/crypto/camellia_aesni_avx_glue.c
+++ b/arch/x86/crypto/camellia_aesni_avx_glue.c
@@ -554,7 +554,7 @@ static int __init camellia_aesni_init(void)
 {
 	const char *feature_name;
 
-	if (!cpu_has_avx ||
+	if (!boot_cpu_has(X86_FEATURE_AVX) ||
 	    !boot_cpu_has(X86_FEATURE_AES) ||
 	    !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
 		pr_info("AVX or AES-NI instructions are not detected.\n");
diff --git a/arch/x86/crypto/chacha20_glue.c b/arch/x86/crypto/chacha20_glue.c
index cea061e137da..2d5c2e0bd939 100644
--- a/arch/x86/crypto/chacha20_glue.c
+++ b/arch/x86/crypto/chacha20_glue.c
@@ -129,7 +129,8 @@ static int __init chacha20_simd_mod_init(void)
 		return -ENODEV;
 
 #ifdef CONFIG_AS_AVX2
-	chacha20_use_avx2 = cpu_has_avx && boot_cpu_has(X86_FEATURE_AVX2) &&
+	chacha20_use_avx2 = boot_cpu_has(X86_FEATURE_AVX) &&
+			    boot_cpu_has(X86_FEATURE_AVX2) &&
 			    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL);
 #endif
 	return crypto_register_alg(&alg);
diff --git a/arch/x86/crypto/poly1305_glue.c b/arch/x86/crypto/poly1305_glue.c
index ea21d2e440f7..e32142bc071d 100644
--- a/arch/x86/crypto/poly1305_glue.c
+++ b/arch/x86/crypto/poly1305_glue.c
@@ -183,7 +183,8 @@ static int __init poly1305_simd_mod_init(void)
 		return -ENODEV;
 
 #ifdef CONFIG_AS_AVX2
-	poly1305_use_avx2 = cpu_has_avx && boot_cpu_has(X86_FEATURE_AVX2) &&
+	poly1305_use_avx2 = boot_cpu_has(X86_FEATURE_AVX) &&
+			    boot_cpu_has(X86_FEATURE_AVX2) &&
 			    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL);
 	alg.descsize = sizeof(struct poly1305_simd_desc_ctx);
 	if (poly1305_use_avx2)
diff --git a/arch/x86/crypto/sha1_ssse3_glue.c b/arch/x86/crypto/sha1_ssse3_glue.c
index dd14616b7739..1024e378a358 100644
--- a/arch/x86/crypto/sha1_ssse3_glue.c
+++ b/arch/x86/crypto/sha1_ssse3_glue.c
@@ -166,7 +166,7 @@ static struct shash_alg sha1_avx_alg = {
 static bool avx_usable(void)
 {
 	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
-		if (cpu_has_avx)
+		if (boot_cpu_has(X86_FEATURE_AVX))
 			pr_info("AVX detected but unusable.\n");
 		return false;
 	}
diff --git a/arch/x86/crypto/sha256_ssse3_glue.c b/arch/x86/crypto/sha256_ssse3_glue.c
index 5f4d6086dc59..3ae0f43ebd37 100644
--- a/arch/x86/crypto/sha256_ssse3_glue.c
+++ b/arch/x86/crypto/sha256_ssse3_glue.c
@@ -201,7 +201,7 @@ static struct shash_alg sha256_avx_algs[] = { {
 static bool avx_usable(void)
 {
 	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
-		if (cpu_has_avx)
+		if (boot_cpu_has(X86_FEATURE_AVX))
 			pr_info("AVX detected but unusable.\n");
 		return false;
 	}
diff --git a/arch/x86/crypto/sha512_ssse3_glue.c b/arch/x86/crypto/sha512_ssse3_glue.c
index 34e5083d6f36..0b17c83d027d 100644
--- a/arch/x86/crypto/sha512_ssse3_glue.c
+++ b/arch/x86/crypto/sha512_ssse3_glue.c
@@ -151,7 +151,7 @@ asmlinkage void sha512_transform_avx(u64 *digest, const char *data,
 static bool avx_usable(void)
 {
 	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
-		if (cpu_has_avx)
+		if (boot_cpu_has(X86_FEATURE_AVX))
 			pr_info("AVX detected but unusable.\n");
 		return false;
 	}
diff --git a/arch/x86/include/asm/cpufeature.h b/arch/x86/include/asm/cpufeature.h
index a6627b30bf45..3b232a120a5d 100644
--- a/arch/x86/include/asm/cpufeature.h
+++ b/arch/x86/include/asm/cpufeature.h
@@ -123,7 +123,6 @@ extern const char * const x86_bug_flags[NBUGINTS*32];
 #define cpu_has_apic		boot_cpu_has(X86_FEATURE_APIC)
 #define cpu_has_fxsr		boot_cpu_has(X86_FEATURE_FXSR)
 #define cpu_has_xmm		boot_cpu_has(X86_FEATURE_XMM)
-#define cpu_has_avx		boot_cpu_has(X86_FEATURE_AVX)
 #define cpu_has_xsave		boot_cpu_has(X86_FEATURE_XSAVE)
 #define cpu_has_xsaves		boot_cpu_has(X86_FEATURE_XSAVES)
 /*
diff --git a/arch/x86/include/asm/xor_avx.h b/arch/x86/include/asm/xor_avx.h
index e45e556140af..22a7b1870a31 100644
--- a/arch/x86/include/asm/xor_avx.h
+++ b/arch/x86/include/asm/xor_avx.h
@@ -167,12 +167,12 @@ static struct xor_block_template xor_block_avx = {
 
 #define AVX_XOR_SPEED \
 do { \
-	if (cpu_has_avx && boot_cpu_has(X86_FEATURE_OSXSAVE)) \
+	if (boot_cpu_has(X86_FEATURE_AVX) && boot_cpu_has(X86_FEATURE_OSXSAVE)) \
 		xor_speed(&xor_block_avx); \
 } while (0)
 
 #define AVX_SELECT(FASTEST) \
-	(cpu_has_avx && boot_cpu_has(X86_FEATURE_OSXSAVE) ? &xor_block_avx : FASTEST)
+	(boot_cpu_has(X86_FEATURE_AVX) && boot_cpu_has(X86_FEATURE_OSXSAVE) ? &xor_block_avx : FASTEST)
 
 #else
 