about summary refs log tree commit diff stats
diff options
context:
space:
mode:
authorEric Biggers <ebiggers@google.com>2019-04-13 01:33:12 -0400
committerHerbert Xu <herbert@gondor.apana.org.au>2019-04-18 10:15:04 -0400
commit626ddb2fbe7931a2996bd7fe88bd1ffd5daf7143 (patch)
tree132a14ae7204569d22896438fda6204af098a1e1
parent0edf8593eb0985c88aa668b00befcdc1183d004d (diff)
crypto: powerpc - convert to use crypto_simd_usable()
Replace all calls to in_interrupt() in the PowerPC crypto code with !crypto_simd_usable(). This causes the crypto self-tests to test the no-SIMD code paths when CONFIG_CRYPTO_MANAGER_EXTRA_TESTS=y.

The p8_ghash algorithm is currently failing and needs to be fixed, as it produces the wrong digest when no-SIMD updates are mixed with SIMD ones.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
-rw-r--r--arch/powerpc/crypto/crc32c-vpmsum_glue.c4
-rw-r--r--arch/powerpc/crypto/crct10dif-vpmsum_glue.c4
-rw-r--r--arch/powerpc/include/asm/Kbuild1
-rw-r--r--drivers/crypto/vmx/aes.c7
-rw-r--r--drivers/crypto/vmx/aes_cbc.c7
-rw-r--r--drivers/crypto/vmx/aes_ctr.c5
-rw-r--r--drivers/crypto/vmx/aes_xts.c5
-rw-r--r--drivers/crypto/vmx/ghash.c9
8 files changed, 25 insertions, 17 deletions
diff --git a/arch/powerpc/crypto/crc32c-vpmsum_glue.c b/arch/powerpc/crypto/crc32c-vpmsum_glue.c
index fd1d6c83f0c0..c4fa242dd652 100644
--- a/arch/powerpc/crypto/crc32c-vpmsum_glue.c
+++ b/arch/powerpc/crypto/crc32c-vpmsum_glue.c
@@ -1,10 +1,12 @@
1#include <linux/crc32.h> 1#include <linux/crc32.h>
2#include <crypto/internal/hash.h> 2#include <crypto/internal/hash.h>
3#include <crypto/internal/simd.h>
3#include <linux/init.h> 4#include <linux/init.h>
4#include <linux/module.h> 5#include <linux/module.h>
5#include <linux/string.h> 6#include <linux/string.h>
6#include <linux/kernel.h> 7#include <linux/kernel.h>
7#include <linux/cpufeature.h> 8#include <linux/cpufeature.h>
9#include <asm/simd.h>
8#include <asm/switch_to.h> 10#include <asm/switch_to.h>
9 11
10#define CHKSUM_BLOCK_SIZE 1 12#define CHKSUM_BLOCK_SIZE 1
@@ -22,7 +24,7 @@ static u32 crc32c_vpmsum(u32 crc, unsigned char const *p, size_t len)
22 unsigned int prealign; 24 unsigned int prealign;
23 unsigned int tail; 25 unsigned int tail;
24 26
25 if (len < (VECTOR_BREAKPOINT + VMX_ALIGN) || in_interrupt()) 27 if (len < (VECTOR_BREAKPOINT + VMX_ALIGN) || !crypto_simd_usable())
26 return __crc32c_le(crc, p, len); 28 return __crc32c_le(crc, p, len);
27 29
28 if ((unsigned long)p & VMX_ALIGN_MASK) { 30 if ((unsigned long)p & VMX_ALIGN_MASK) {
diff --git a/arch/powerpc/crypto/crct10dif-vpmsum_glue.c b/arch/powerpc/crypto/crct10dif-vpmsum_glue.c
index 02ea277863d1..e27ff16573b5 100644
--- a/arch/powerpc/crypto/crct10dif-vpmsum_glue.c
+++ b/arch/powerpc/crypto/crct10dif-vpmsum_glue.c
@@ -12,11 +12,13 @@
12 12
13#include <linux/crc-t10dif.h> 13#include <linux/crc-t10dif.h>
14#include <crypto/internal/hash.h> 14#include <crypto/internal/hash.h>
15#include <crypto/internal/simd.h>
15#include <linux/init.h> 16#include <linux/init.h>
16#include <linux/module.h> 17#include <linux/module.h>
17#include <linux/string.h> 18#include <linux/string.h>
18#include <linux/kernel.h> 19#include <linux/kernel.h>
19#include <linux/cpufeature.h> 20#include <linux/cpufeature.h>
21#include <asm/simd.h>
20#include <asm/switch_to.h> 22#include <asm/switch_to.h>
21 23
22#define VMX_ALIGN 16 24#define VMX_ALIGN 16
@@ -32,7 +34,7 @@ static u16 crct10dif_vpmsum(u16 crci, unsigned char const *p, size_t len)
32 unsigned int tail; 34 unsigned int tail;
33 u32 crc = crci; 35 u32 crc = crci;
34 36
35 if (len < (VECTOR_BREAKPOINT + VMX_ALIGN) || in_interrupt()) 37 if (len < (VECTOR_BREAKPOINT + VMX_ALIGN) || !crypto_simd_usable())
36 return crc_t10dif_generic(crc, p, len); 38 return crc_t10dif_generic(crc, p, len);
37 39
38 if ((unsigned long)p & VMX_ALIGN_MASK) { 40 if ((unsigned long)p & VMX_ALIGN_MASK) {
diff --git a/arch/powerpc/include/asm/Kbuild b/arch/powerpc/include/asm/Kbuild
index a0c132bedfae..5ac3dead6952 100644
--- a/arch/powerpc/include/asm/Kbuild
+++ b/arch/powerpc/include/asm/Kbuild
@@ -11,3 +11,4 @@ generic-y += preempt.h
11generic-y += rwsem.h 11generic-y += rwsem.h
12generic-y += vtime.h 12generic-y += vtime.h
13generic-y += msi.h 13generic-y += msi.h
14generic-y += simd.h
diff --git a/drivers/crypto/vmx/aes.c b/drivers/crypto/vmx/aes.c
index b00d6947e02f..603a62081994 100644
--- a/drivers/crypto/vmx/aes.c
+++ b/drivers/crypto/vmx/aes.c
@@ -23,9 +23,10 @@
23#include <linux/err.h> 23#include <linux/err.h>
24#include <linux/crypto.h> 24#include <linux/crypto.h>
25#include <linux/delay.h> 25#include <linux/delay.h>
26#include <linux/hardirq.h> 26#include <asm/simd.h>
27#include <asm/switch_to.h> 27#include <asm/switch_to.h>
28#include <crypto/aes.h> 28#include <crypto/aes.h>
29#include <crypto/internal/simd.h>
29 30
30#include "aesp8-ppc.h" 31#include "aesp8-ppc.h"
31 32
@@ -92,7 +93,7 @@ static void p8_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
92{ 93{
93 struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm); 94 struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
94 95
95 if (in_interrupt()) { 96 if (!crypto_simd_usable()) {
96 crypto_cipher_encrypt_one(ctx->fallback, dst, src); 97 crypto_cipher_encrypt_one(ctx->fallback, dst, src);
97 } else { 98 } else {
98 preempt_disable(); 99 preempt_disable();
@@ -109,7 +110,7 @@ static void p8_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
109{ 110{
110 struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm); 111 struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
111 112
112 if (in_interrupt()) { 113 if (!crypto_simd_usable()) {
113 crypto_cipher_decrypt_one(ctx->fallback, dst, src); 114 crypto_cipher_decrypt_one(ctx->fallback, dst, src);
114 } else { 115 } else {
115 preempt_disable(); 116 preempt_disable();
diff --git a/drivers/crypto/vmx/aes_cbc.c b/drivers/crypto/vmx/aes_cbc.c
index fbe882ef1bc5..a1a9a6f0d42c 100644
--- a/drivers/crypto/vmx/aes_cbc.c
+++ b/drivers/crypto/vmx/aes_cbc.c
@@ -23,9 +23,10 @@
23#include <linux/err.h> 23#include <linux/err.h>
24#include <linux/crypto.h> 24#include <linux/crypto.h>
25#include <linux/delay.h> 25#include <linux/delay.h>
26#include <linux/hardirq.h> 26#include <asm/simd.h>
27#include <asm/switch_to.h> 27#include <asm/switch_to.h>
28#include <crypto/aes.h> 28#include <crypto/aes.h>
29#include <crypto/internal/simd.h>
29#include <crypto/scatterwalk.h> 30#include <crypto/scatterwalk.h>
30#include <crypto/skcipher.h> 31#include <crypto/skcipher.h>
31 32
@@ -100,7 +101,7 @@ static int p8_aes_cbc_encrypt(struct blkcipher_desc *desc,
100 struct p8_aes_cbc_ctx *ctx = 101 struct p8_aes_cbc_ctx *ctx =
101 crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm)); 102 crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
102 103
103 if (in_interrupt()) { 104 if (!crypto_simd_usable()) {
104 SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback); 105 SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
105 skcipher_request_set_sync_tfm(req, ctx->fallback); 106 skcipher_request_set_sync_tfm(req, ctx->fallback);
106 skcipher_request_set_callback(req, desc->flags, NULL, NULL); 107 skcipher_request_set_callback(req, desc->flags, NULL, NULL);
@@ -139,7 +140,7 @@ static int p8_aes_cbc_decrypt(struct blkcipher_desc *desc,
139 struct p8_aes_cbc_ctx *ctx = 140 struct p8_aes_cbc_ctx *ctx =
140 crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm)); 141 crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
141 142
142 if (in_interrupt()) { 143 if (!crypto_simd_usable()) {
143 SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback); 144 SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
144 skcipher_request_set_sync_tfm(req, ctx->fallback); 145 skcipher_request_set_sync_tfm(req, ctx->fallback);
145 skcipher_request_set_callback(req, desc->flags, NULL, NULL); 146 skcipher_request_set_callback(req, desc->flags, NULL, NULL);
diff --git a/drivers/crypto/vmx/aes_ctr.c b/drivers/crypto/vmx/aes_ctr.c
index 214c69db9ebd..192a53512f5e 100644
--- a/drivers/crypto/vmx/aes_ctr.c
+++ b/drivers/crypto/vmx/aes_ctr.c
@@ -23,9 +23,10 @@
23#include <linux/err.h> 23#include <linux/err.h>
24#include <linux/crypto.h> 24#include <linux/crypto.h>
25#include <linux/delay.h> 25#include <linux/delay.h>
26#include <linux/hardirq.h> 26#include <asm/simd.h>
27#include <asm/switch_to.h> 27#include <asm/switch_to.h>
28#include <crypto/aes.h> 28#include <crypto/aes.h>
29#include <crypto/internal/simd.h>
29#include <crypto/scatterwalk.h> 30#include <crypto/scatterwalk.h>
30#include <crypto/skcipher.h> 31#include <crypto/skcipher.h>
31 32
@@ -119,7 +120,7 @@ static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
119 struct p8_aes_ctr_ctx *ctx = 120 struct p8_aes_ctr_ctx *ctx =
120 crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm)); 121 crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
121 122
122 if (in_interrupt()) { 123 if (!crypto_simd_usable()) {
123 SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback); 124 SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
124 skcipher_request_set_sync_tfm(req, ctx->fallback); 125 skcipher_request_set_sync_tfm(req, ctx->fallback);
125 skcipher_request_set_callback(req, desc->flags, NULL, NULL); 126 skcipher_request_set_callback(req, desc->flags, NULL, NULL);
diff --git a/drivers/crypto/vmx/aes_xts.c b/drivers/crypto/vmx/aes_xts.c
index 5bf4c3856650..00d412d811ae 100644
--- a/drivers/crypto/vmx/aes_xts.c
+++ b/drivers/crypto/vmx/aes_xts.c
@@ -23,9 +23,10 @@
23#include <linux/err.h> 23#include <linux/err.h>
24#include <linux/crypto.h> 24#include <linux/crypto.h>
25#include <linux/delay.h> 25#include <linux/delay.h>
26#include <linux/hardirq.h> 26#include <asm/simd.h>
27#include <asm/switch_to.h> 27#include <asm/switch_to.h>
28#include <crypto/aes.h> 28#include <crypto/aes.h>
29#include <crypto/internal/simd.h>
29#include <crypto/scatterwalk.h> 30#include <crypto/scatterwalk.h>
30#include <crypto/xts.h> 31#include <crypto/xts.h>
31#include <crypto/skcipher.h> 32#include <crypto/skcipher.h>
@@ -109,7 +110,7 @@ static int p8_aes_xts_crypt(struct blkcipher_desc *desc,
109 struct p8_aes_xts_ctx *ctx = 110 struct p8_aes_xts_ctx *ctx =
110 crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm)); 111 crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
111 112
112 if (in_interrupt()) { 113 if (!crypto_simd_usable()) {
113 SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback); 114 SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
114 skcipher_request_set_sync_tfm(req, ctx->fallback); 115 skcipher_request_set_sync_tfm(req, ctx->fallback);
115 skcipher_request_set_callback(req, desc->flags, NULL, NULL); 116 skcipher_request_set_callback(req, desc->flags, NULL, NULL);
diff --git a/drivers/crypto/vmx/ghash.c b/drivers/crypto/vmx/ghash.c
index dd8b8716467a..611ff591410e 100644
--- a/drivers/crypto/vmx/ghash.c
+++ b/drivers/crypto/vmx/ghash.c
@@ -23,16 +23,15 @@
23#include <linux/err.h> 23#include <linux/err.h>
24#include <linux/crypto.h> 24#include <linux/crypto.h>
25#include <linux/delay.h> 25#include <linux/delay.h>
26#include <linux/hardirq.h> 26#include <asm/simd.h>
27#include <asm/switch_to.h> 27#include <asm/switch_to.h>
28#include <crypto/aes.h> 28#include <crypto/aes.h>
29#include <crypto/ghash.h> 29#include <crypto/ghash.h>
30#include <crypto/scatterwalk.h> 30#include <crypto/scatterwalk.h>
31#include <crypto/internal/hash.h> 31#include <crypto/internal/hash.h>
32#include <crypto/internal/simd.h>
32#include <crypto/b128ops.h> 33#include <crypto/b128ops.h>
33 34
34#define IN_INTERRUPT in_interrupt()
35
36void gcm_init_p8(u128 htable[16], const u64 Xi[2]); 35void gcm_init_p8(u128 htable[16], const u64 Xi[2]);
37void gcm_gmult_p8(u64 Xi[2], const u128 htable[16]); 36void gcm_gmult_p8(u64 Xi[2], const u128 htable[16]);
38void gcm_ghash_p8(u64 Xi[2], const u128 htable[16], 37void gcm_ghash_p8(u64 Xi[2], const u128 htable[16],
@@ -131,7 +130,7 @@ static int p8_ghash_update(struct shash_desc *desc,
131 struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm)); 130 struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
132 struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc); 131 struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);
133 132
134 if (IN_INTERRUPT) { 133 if (!crypto_simd_usable()) {
135 return crypto_shash_update(&dctx->fallback_desc, src, 134 return crypto_shash_update(&dctx->fallback_desc, src,
136 srclen); 135 srclen);
137 } else { 136 } else {
@@ -182,7 +181,7 @@ static int p8_ghash_final(struct shash_desc *desc, u8 *out)
182 struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm)); 181 struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
183 struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc); 182 struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);
184 183
185 if (IN_INTERRUPT) { 184 if (!crypto_simd_usable()) {
186 return crypto_shash_final(&dctx->fallback_desc, out); 185 return crypto_shash_final(&dctx->fallback_desc, out);
187 } else { 186 } else {
188 if (dctx->bytes) { 187 if (dctx->bytes) {