Diffstat (limited to 'arch/x86/crypto/ghash-clmulni-intel_glue.c')
 arch/x86/crypto/ghash-clmulni-intel_glue.c | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/arch/x86/crypto/ghash-clmulni-intel_glue.c b/arch/x86/crypto/ghash-clmulni-intel_glue.c
index 65d409644d72..cbcc8d8ea93a 100644
--- a/arch/x86/crypto/ghash-clmulni-intel_glue.c
+++ b/arch/x86/crypto/ghash-clmulni-intel_glue.c
@@ -159,7 +159,7 @@ static int ghash_async_init(struct ahash_request *req)
 	struct ahash_request *cryptd_req = ahash_request_ctx(req);
 	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
 
-	if (irq_fpu_usable()) {
+	if (!irq_fpu_usable()) {
 		memcpy(cryptd_req, req, sizeof(*req));
 		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
 		return crypto_ahash_init(cryptd_req);
@@ -177,7 +177,7 @@ static int ghash_async_update(struct ahash_request *req)
 {
 	struct ahash_request *cryptd_req = ahash_request_ctx(req);
 
-	if (irq_fpu_usable()) {
+	if (!irq_fpu_usable()) {
 		struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
 		struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
 		struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
@@ -195,7 +195,7 @@ static int ghash_async_final(struct ahash_request *req)
 {
 	struct ahash_request *cryptd_req = ahash_request_ctx(req);
 
-	if (irq_fpu_usable()) {
+	if (!irq_fpu_usable()) {
 		struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
 		struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
 		struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
@@ -216,7 +216,7 @@ static int ghash_async_digest(struct ahash_request *req)
 	struct ahash_request *cryptd_req = ahash_request_ctx(req);
 	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
 
-	if (irq_fpu_usable()) {
+	if (!irq_fpu_usable()) {
 		memcpy(cryptd_req, req, sizeof(*req));
 		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
 		return crypto_ahash_digest(cryptd_req);
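
All four hunks make the same one-character fix: the request must be deferred to cryptd when the FPU is not usable, because the PCLMULQDQ-accelerated GHASH code clobbers XMM state and may only run where kernel_fpu_begin() is safe. Before this change the test was inverted, so the synchronous SIMD path was taken exactly in the contexts (e.g. interrupts that preempted FPU-owning code) where it must not run. For context, here is a sketch of ghash_async_init() as it reads after the patch; the else branch is not part of any hunk above and is reconstructed from the mainline driver, so treat it as illustrative rather than authoritative:

static int ghash_async_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!irq_fpu_usable()) {
		/* FPU/SIMD state cannot be touched in this context:
		 * bounce the request to the cryptd worker, which runs it
		 * in process context where kernel_fpu_begin() is safe. */
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_init(cryptd_req);
	} else {
		/* Fast path: run the PCLMULQDQ shash synchronously.
		 * This branch is reconstructed from the surrounding file
		 * and is not shown in the hunks above. */
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

		desc->tfm = child;
		desc->flags = req->base.flags;
		return crypto_shash_init(desc);
	}
}

ghash_async_update(), ghash_async_final() and ghash_async_digest() follow the same shape, calling crypto_ahash_update(), crypto_ahash_final() and crypto_ahash_digest() on the cryptd request in the deferred branch.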