aboutsummaryrefslogtreecommitdiffstats
path: root/arch/x86/crypto
diff options
context:
space:
mode:
authorHerbert Xu <herbert@gondor.apana.org.au>2016-06-21 04:55:14 -0400
committerHerbert Xu <herbert@gondor.apana.org.au>2016-06-23 06:29:52 -0400
commit38b2f68b426429c06cdf2ae5c8a89371524db203 (patch)
treebceabceea3214feb3419f23128cb59346cf50a47 /arch/x86/crypto
parent81760ea6a95ad4c41273a71052f61b9f087b5753 (diff)
crypto: aesni - Fix cryptd reordering problem on gcm
This patch fixes an old bug where gcm requests can be reordered because some are processed by cryptd while others are processed directly in softirq context. The fix is to always postpone to cryptd if there are currently requests outstanding from the same tfm. Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'arch/x86/crypto')
-rw-r--r--arch/x86/crypto/aesni-intel_glue.c18
1 file changed, 12 insertions, 6 deletions
diff --git a/arch/x86/crypto/aesni-intel_glue.c b/arch/x86/crypto/aesni-intel_glue.c
index 5b7fa1471007..9e15572ef06d 100644
--- a/arch/x86/crypto/aesni-intel_glue.c
+++ b/arch/x86/crypto/aesni-intel_glue.c
@@ -1098,9 +1098,12 @@ static int rfc4106_encrypt(struct aead_request *req)
 	struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
 	struct cryptd_aead *cryptd_tfm = *ctx;
 
-	aead_request_set_tfm(req, irq_fpu_usable() ?
-			     cryptd_aead_child(cryptd_tfm) :
-			     &cryptd_tfm->base);
+	tfm = &cryptd_tfm->base;
+	if (irq_fpu_usable() && (!in_atomic() ||
+				 !cryptd_aead_queued(cryptd_tfm)))
+		tfm = cryptd_aead_child(cryptd_tfm);
+
+	aead_request_set_tfm(req, tfm);
 
 	return crypto_aead_encrypt(req);
 }
@@ -1111,9 +1114,12 @@ static int rfc4106_decrypt(struct aead_request *req)
 	struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
 	struct cryptd_aead *cryptd_tfm = *ctx;
 
-	aead_request_set_tfm(req, irq_fpu_usable() ?
-			     cryptd_aead_child(cryptd_tfm) :
-			     &cryptd_tfm->base);
+	tfm = &cryptd_tfm->base;
+	if (irq_fpu_usable() && (!in_atomic() ||
+				 !cryptd_aead_queued(cryptd_tfm)))
+		tfm = cryptd_aead_child(cryptd_tfm);
+
+	aead_request_set_tfm(req, tfm);
 
 	return crypto_aead_decrypt(req);
 }